code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1
value | license stringclasses 15
values | size int64 3 1.05M |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
import pytest
import os
from tempfile import mkdtemp
from wasp_launcher.version import revision
@pytest.fixture()
def cwd(request):
    """Snapshot the current working directory and restore it on teardown."""
    saved = os.getcwd()
    request.addfinalizer(lambda: os.chdir(saved))
@pytest.mark.usefixtures('cwd')
def test_revision(tmpdir):
    """revision() must not raise inside the checkout and must fall back to
    the '--' placeholder when run outside of any VCS working tree."""
    # Called from the source checkout; result ignored, this only checks
    # that the call succeeds when VCS metadata is available.
    revision()
    # tmpdir contains no VCS metadata, so revision() must return '--'.
    # (The 'cwd' fixture restores the working directory afterwards.)
    os.chdir(tmpdir.strpath)
    assert(revision() == '--')
| a1ezzz/wasp-launcher | tests/wasp_launcher_version_test.py | Python | lgpl-3.0 | 368 |
#
# Copyright (C) 2010-2014 Stanislav Bohm
# 2011 Ondrej Garncarz
# 2012 Martin Surkovsky
#
# This file is part of Kaira.
#
# Kaira is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License, or
# (at your option) any later version.
#
# Kaira is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kaira. If not, see <http://www.gnu.org/licenses/>.
#
import utils
from utils import xml_int, xml_str
import xml.etree.ElementTree as xml
import citems
import undo
import tracing
class Net:
    """A Petri net model: a named, identified collection of places,
    transitions, edges and areas.  Structural changes are reported through
    change callbacks so the GUI can refresh its views."""

    def __init__(self, project, name, id=None):
        """Create a net owned by *project*; draws a fresh id when none given."""
        if id is None:
            self.id = project.new_id()
        else:
            self.id = id
        self.project = project
        self.name = name
        self.items = []
        # Invoked with this net after any structural change; no-op by default.
        self.change_callback = lambda n: None
        # Invoked with (net, item) when a single item changes; optional.
        self.change_item_callback = None
        self.undo_manager = undo.UndoManager()

    def get_name(self):
        return self.name

    def get_id(self):
        return self.id

    def is_build_net(self):
        """Return True when this net is the project's build net."""
        return self.project.build_net == self

    def set_change_callback(self, callback):
        self.change_callback = callback

    def new_id(self):
        """Delegate id generation to the owning project."""
        return self.project.new_id()

    def add_item(self, item):
        """Insert *item*, assigning a fresh id when it has none, and notify."""
        if item.id is None:
            item.id = self.new_id()
        self.items.append(item)
        self.changed()

    def set_name(self, name):
        self.name = name
        self.changed()

    def changed(self):
        self.change_callback(self)

    def changed_item(self, item):
        if self.change_item_callback:
            self.change_item_callback(self, item)

    def add_place(self, position, id = None):
        """Create a Place at *position*, add it to the net and return it."""
        place = Place(self, id, position)
        self.add_item(place)
        self.changed()
        return place

    def add_transition(self, position, id = None):
        """Create a Transition at *position*, add it and return it."""
        transition = Transition(self, id, position)
        self.add_item(transition)
        self.changed()
        return transition

    def add_edge(self, item1, item2, points, id = None):
        """Create an Edge from *item1* to *item2* through *points*."""
        edge = Edge(self, id, item1, item2, points)
        self.add_item(edge)
        self.changed()
        return edge

    def add_area(self, position, size, id = None):
        """Create a NetArea with the given geometry, add it and return it."""
        area = NetArea(self, id, position, size)
        self.add_item(area)
        self.changed()
        return area

    def as_xml(self):
        """Serialize the net and all of its items into an XML element."""
        e = xml.Element("net")
        e.set("name", self.name)
        e.set("id", str(self.id))
        for item in self.items:
            e.append(item.as_xml())
        return e

    def copy(self):
        """Return a copy of this net in which every item has a fresh id."""
        # Local renamed from "xml" so the ElementTree module import is not
        # shadowed inside this method.
        element = self.as_xml()
        return load_net(element, self.project, NewIdLoader(self.project))

    def get_item(self, id):
        """Return the item with the given id, or None."""
        for i in self.items:
            if i.get_id() == id:
                return i
        return None

    def places(self):
        return [ item for item in self.items if item.is_place() ]

    def transitions(self):
        return [ item for item in self.items if item.is_transition() ]

    def edges(self):
        return [ item for item in self.items if item.is_edge() ]

    def areas(self):
        return [ item for item in self.items if item.is_area() ]

    def inodes(self):
        return [ item for item in self.items if item.is_inode() ]

    def export_xml(self, build_config):
        """Serialize the net for the build process; edges are serialized
        inside their transitions, so only places/transitions/areas appear."""
        e = xml.Element("net")
        e.set("name", self.name)
        e.set("id", str(self.id))
        for place in self.places():
            e.append(place.export_xml(build_config))
        for transition in self.transitions():
            e.append(transition.export_xml(build_config))
        for area in self.areas():
            e.append(area.export_xml())
        return e

    def item_by_id(self, id):
        # Same lookup as get_item(); kept for backward compatibility.
        for item in self.items:
            if item.id == id:
                return item
        return None

    def contains(self, item):
        return item in self.items

    def delete_item(self, item):
        self.items.remove(item)
        self.changed()

    def edges_from(self, item, postprocess=False):
        """Edges leaving *item*.  With postprocess=True the edges are put in
        export form and complements of bidirectional incoming edges added."""
        edges = [ i for i in self.items
                  if i.is_edge() and i.from_item == item ]
        if postprocess:
            edges = [ i.export_form() for i in edges ]
            edges += [ i.make_complement(export_form=True)
                       for i in self.edges_to(item) if i.is_bidirectional() ]
        return edges

    def edges_to(self, item, postprocess=False):
        """Edges entering *item*.  With postprocess=True the edges are put in
        export form and complements of bidirectional outgoing edges added."""
        edges = [ i for i in self.items if i.is_edge() and i.to_item == item ]
        if postprocess:
            edges = [ i.export_form() for i in edges ]
            edges += [ i.make_complement(export_form=True)
                       for i in self.edges_from(item) if i.is_bidirectional() ]
        return edges

    def edges_of(self, item):
        """All edges touching *item* in either direction."""
        return [ i for i in self.items
                 if i.is_edge() and (i.to_item == item or i.from_item == item) ]

    def edges_out(self):
        """Edges that can transport tokens out of a transition."""
        return [ i for i in self.items
                 if i.is_edge() and (i.from_item.is_transition() or i.is_bidirectional()) ]

    def trace_nothing(self):
        """Disable tracing on every place and transition."""
        for i in self.transitions():
            i.trace_fire = False
        for i in self.places():
            i.trace_tokens = False
        self.changed()

    def trace_everything(self):
        """Enable tracing on every place and transition."""
        for i in self.transitions():
            i.trace_fire = True
        for i in self.places():
            i.trace_tokens = True
        self.changed()
class NetItem(object):
    """Base class of everything stored in a Net (places, transitions, edges,
    areas).  Provides identity, deletion and error-label construction."""

    def __init__(self, net, id):
        self.net = net
        self.id = id

    def get_id(self):
        return self.id

    def changed(self):
        # Report a single-item change to the owning net.
        self.net.changed_item(self)

    # --- type predicates; subclasses override exactly one of them --------
    def is_place(self):
        return False

    def is_transition(self):
        return False

    def is_edge(self):
        return False

    def is_area(self):
        return False

    def is_inode(self):
        return False

    def delete(self):
        """Remove this item from the net; return the list of removed items."""
        self.net.delete_item(self)
        return [ self ]

    def create_xml_element(self, name):
        """Return an XML element called *name* carrying this item's id."""
        element = xml.Element(name)
        element.set("id", str(self.id))
        return element

    def get_canvas_items_dict(self, view_mode):
        """Map canvas-item kind -> canvas item for the given view mode."""
        result = {}
        for i in self.get_canvas_items(view_mode):
            result[i.kind] = i
        return result

    def get_error_items(self):
        """Build red text labels for the error messages attached to this
        item, placed next to the canvas items they refer to."""
        result = []
        messages = self.net.project.get_error_messages(self)
        if not messages:
            return result
        items = self.get_canvas_items_dict(None)
        for name in messages:
            item = items.get(name)
            if item is None:
                # Key was not found, take the first item.
                # For transition/place it is expected that "box" is returned.
                # Fixed: get_canvas_items() requires the view_mode argument
                # in every subclass; calling it without one raised TypeError.
                item = self.get_canvas_items(None)[0]
            position = utils.vector_add(item.get_position(), item.size)
            position = utils.vector_add(position, (0, 0))  # no-op offset hook
            placement = item.get_relative_placement(position)
            error_item = citems.Text(None, "error", placement)
            error_item.delegate_selection = item
            error_item.background_color = (255, 0, 0)
            error_item.border_color = (0, 0, 0)
            error_item.align_y = 0
            error_item.z_level = 20
            # Only the first message for each canvas item is displayed.
            error_item.text = messages[name][0]
            result.append(error_item)
        return result
class NetElement(NetItem):
    """Common base of Place and Transition: a box on the canvas with a name,
    optional user code and tracing/simrun/verification labels."""

    # Default user code shared at class level; instances get their own value
    # through set_code().
    code = ""

    def __init__(self, net, id, position):
        NetItem.__init__(self, net, id)
        # Main canvas box; subclasses supply default_size/default_radius.
        self.box = citems.ElementBox(
            self,
            "box",
            citems.AbsPlacement(position),
            self.default_size,
            self.default_radius)
        # The label placement starts at the center of the box; all three
        # mode-specific labels share this placement.
        self.label_placement = self.box.get_relative_placement(
            utils.vector_add_t(position, self.default_size, 0.5))
        self.label_trace = citems.TraceLabel(self, "tracebox", self.label_placement)
        self.label_trace.text_fn = self.get_trace_label_text
        self.label_simrun = citems.SimRunLabel(self, "simrunbox", self.label_placement)
        self.label_simrun.text_fn = self.get_simrun_label_text
        self.label_verif = citems.VerifLabel(self, "verifbox", self.label_placement)
        self.label_verif.text_fn = self.get_verif_label_text

    def get_canvas_items(self, view_mode):
        """Return the canvas items shown for *view_mode* ("tracing",
        "verif", "simrun", or None for the plain view)."""
        items = [ self.box ]
        if view_mode == "tracing":
            items.append(self.label_trace)
        elif view_mode == "verif":
            items.append(self.label_verif)
        elif view_mode == "simrun":
            items.append(self.label_simrun)
        return items

    def has_code(self):
        """Return True when the element carries non-whitespace user code."""
        return self.code.strip() != ""

    def get_code(self):
        return self.code

    def set_code(self, code):
        self.code = code
        # A double border marks elements that carry user code.
        self.box.doubleborder = self.has_code()
        self.changed()

    def edges(self):
        return self.net.edges_of(self)

    def edges_from(self, postprocess=False):
        return self.net.edges_from(self, postprocess)

    def edges_to(self, postprocess=False):
        return self.net.edges_to(self, postprocess)

    def delete(self):
        """Delete the element together with all edges attached to it."""
        deleted = []
        for edge in self.edges():
            deleted += edge.delete()
        deleted += NetItem.delete(self)
        return deleted

    def xml_code_element(self):
        """Return a <code> element wrapping the user code."""
        e = xml.Element("code")
        e.text = self.code
        return e

    def area(self):
        """Return the first area containing this element, or None."""
        for area in self.net.areas():
            if area.is_inside(self):
                return area

    def get_name(self):
        return self.box.name

    def get_name_or_id(self):
        """Return the name, or "#<id>" when the element is unnamed."""
        if not self.box.name:
            return "#{0}".format(self.id)
        return self.box.name

    def set_name(self, name):
        self.box.name = name
        self.changed()
class Transition(NetElement):
    """A transition of the net.

    Carries the guard expression, collective-communication settings,
    tracing, simrun substitutions and verification options, together with
    their canvas representation."""

    default_size = (70, 36)
    default_radius = 0
    trace_fire = False
    # Collective communication
    collective = False
    # Simrun options
    time_substitution = False
    time_substitution_code = ""
    clock_substitution = False
    clock_substitution_code = ""
    clock = False
    # Verif options
    calls_quit = False
    occurrence_analysis = False
    occurrence_analysis_compare_process = False
    occurrence_analysis_compare_binding = False

    def __init__(self, net, id, position):
        NetElement.__init__(self, net, id, position)
        # Guard text is placed above the box.
        p = (position[0], position[1] - 20)
        self.guard = citems.Text(self, "guard", self.box.get_relative_placement(p))
        # Root text (collective communication) sits to the right of the box.
        p = (position[0] + self.default_size[0] / 2 + 5, position[1] + 40)
        self.root = citems.Text(self, "root", self.box.get_relative_placement(p))
        self.root.format = "root({0})"

    def get_canvas_items(self, view_mode):
        items = NetElement.get_canvas_items(self, view_mode)
        items.append(self.guard)
        if self.collective:
            items.append(self.root)
        if self.clock:
            p = utils.vector_add(self.box.get_position(), (-9, 7))
            items.append(citems.ClockIcon(
                self, "clock", self.box.get_relative_placement(p)))
        return items

    def set_collective(self, value):
        self.collective = value
        # Thick lines mark collective transitions on the canvas.
        self.box.thicklines = value
        self.changed()

    def is_collective(self):
        return self.collective

    def set_root(self, value):
        self.root.text = value
        self.changed()

    def get_root(self):
        return self.root.text

    def get_time_substitution(self):
        return self.time_substitution

    def set_time_substitution(self, value):
        self.time_substitution = value
        self.changed()

    def get_time_substitution_code(self):
        return self.time_substitution_code

    def set_time_substitution_code(self, value):
        self.time_substitution_code = value
        self.changed()

    def get_clock_substitution(self):
        return self.clock_substitution

    def set_clock_substitution(self, value):
        self.clock_substitution = value
        self.changed()

    def get_clock_substitution_code(self):
        return self.clock_substitution_code

    def set_clock_substitution_code(self, value):
        self.clock_substitution_code = value
        self.changed()

    def set_clock(self, value):
        # NOTE(review): unlike every other setter here, this one does not
        # call self.changed() -- confirm whether that is intentional.
        self.clock = value

    def has_clock(self):
        return self.clock

    def get_priority(self):
        return self.box.corner_text

    def set_priority(self, priority):
        self.box.corner_text = priority
        self.changed()

    def get_priroty(self):
        # Deprecated misspelled alias kept for backward compatibility;
        # use get_priority() instead.
        return self.get_priority()

    def get_guard(self):
        return self.guard.text

    def set_guard(self, guard):
        self.guard.text = guard
        self.changed()

    def is_transition(self):
        return True

    def is_immediate(self):
        """A transition without user code fires immediately."""
        return not self.has_code()

    def as_xml(self):
        """Serialize the transition for project files."""
        e = self.create_xml_element("transition")
        e.set("name", self.box.name)
        e.set("priority", self.get_priority())
        position = self.box.get_position()
        e.set("x", str(position[0]))
        e.set("y", str(position[1]))
        e.set("sx", str(self.box.size[0]))
        e.set("sy", str(self.box.size[1]))
        e.set("clock", str(self.has_clock()))
        e.set("label-x", str(self.label_placement.get_position()[0]))
        e.set("label-y", str(self.label_placement.get_position()[1]))
        e.set("collective", str(self.collective))
        e.append(canvastext_to_xml(self.guard, "guard"))
        if self.has_code():
            e.append(self.xml_code_element())
        if self.trace_fire:
            element = xml.Element("trace")
            element.text = "fire"
            e.append(element)
        if self.time_substitution:
            element = xml.Element("time-substitution")
            element.text = self.time_substitution_code
            e.append(element)
        if self.clock_substitution:
            element = xml.Element("clock-substitution")
            element.text = self.clock_substitution_code
            e.append(element)
        if self.calls_quit:
            element = xml.Element("verif-quit_flag")
            element.text = "True"
            e.append(element)
        if self.occurrence_analysis:
            # (A redundant re-check of self.occurrence_analysis was removed.)
            element = xml.Element("verif-occurrence")
            element.set("process", str(self.occurrence_analysis_compare_process))
            element.set("binding", str(self.occurrence_analysis_compare_binding))
            e.append(element)
        if self.collective:
            e.append(canvastext_to_xml(self.root, "root"))
        return e

    def get_trace_label_text(self):
        if self.trace_fire:
            return "fire"
        else:
            return ""

    def get_verif_label_text(self):
        texts = []
        if self.occurrence_analysis:
            texts.append("occurrence")
            if self.occurrence_analysis_compare_process:
                texts[-1] += " +process"
            if self.occurrence_analysis_compare_binding:
                texts[-1] += " +binding"
        if self.calls_quit:
            texts.append("call quit")
        return texts

    def get_simrun_label_text(self):
        items = []
        if self.get_time_substitution():
            items.append("time: {0}".format(self.get_time_substitution_code()))
        if self.get_clock_substitution():
            items.append("clock: {0}".format(self.get_clock_substitution_code()))
        return "\n".join(items)

    def export_xml(self, build_config):
        """Serialize the transition for the build process, including its
        input/output edges and build-config dependent options."""
        e = self.create_xml_element("transition")
        e.set("name", self.box.name)
        e.set("guard", self.guard.text)
        e.set("priority", self.get_priority())
        e.set("clock", str(self.has_clock()))
        e.set("collective", str(self.is_collective()))
        if self.has_code():
            e.append(self.xml_code_element())
        if self.is_collective():
            e.set("root", self.get_root())
        for edge in self.edges_to(postprocess=True):
            e.append(edge.create_xml_export_element("edge-in", build_config))
        for edge in self.edges_from(postprocess=True):
            e.append(edge.create_xml_export_element("edge-out", build_config))
        if build_config.tracing:
            # Because of a known bug, firing is always traced in tracing
            # builds, even when self.trace_fire is disabled.
            element = xml.Element("trace")
            element.text = "fire"
            e.append(element)
        if build_config.substitutions and self.time_substitution:
            element = xml.Element("time-substitution")
            element.text = self.time_substitution_code
            e.append(element)
        if build_config.substitutions and self.clock_substitution:
            element = xml.Element("clock-substitution")
            element.text = self.clock_substitution_code
            e.append(element)
        if build_config.verification:
            if self.calls_quit:
                element = xml.Element("verif-quit_flag")
                element.text = "True"
                e.append(element)
            if self.occurrence_analysis:
                element = xml.Element("verif-occurrence")
                element.set("process", str(self.occurrence_analysis_compare_process))
                element.set("binding", str(self.occurrence_analysis_compare_binding))
                e.append(element)
        return e
class Place(NetElement):
    """A place of the net: a circular node with a type, an init expression,
    an optional library interface and token-tracing options."""

    default_size = (0, 0)
    default_radius = 20
    # Verif options
    final_marking = False

    def __init__(self, net, id, position):
        NetElement.__init__(self, net, id, position)
        # Type text bottom-right of the circle, init text top-right.
        p = (position[0] + self.box.radius * 0.85, position[1] + self.box.radius * 0.85)
        self.place_type = citems.Text(self, "type", self.box.get_relative_placement(p))
        p = (position[0] + self.box.radius * 0.85, position[1] - self.box.radius * 1.5)
        self.init = citems.Text(self, "init", self.box.get_relative_placement(p))
        p = self.box.get_relative_placement((- self.box.radius - 5, -5), absolute=False)
        self.interface = citems.PlaceInterface(self, "interface", p)
        self.trace_tokens = False
        self.trace_tokens_functions = []

    def get_canvas_items(self, view_mode):
        items = NetElement.get_canvas_items(self, view_mode)
        items.append(self.place_type)
        items.append(self.init)
        if self.interface.is_visible():
            items.append(self.interface)
        return items

    def get_interface_in(self):
        return self.interface.interface_in

    def set_interface_in(self, value):
        self.interface.interface_in = value
        self.interface.update()
        self.changed()

    def get_interface_out(self):
        return self.interface.interface_out

    def set_interface_out(self, value):
        self.interface.interface_out = value
        self.interface.update()
        self.changed()

    def get_radius(self):
        # Fixed: the radius is stored on the element box; "self.radius"
        # does not exist and raised AttributeError.
        return self.box.radius

    def get_init_string(self):
        return self.init.text

    def set_place_type(self, place_type):
        self.place_type.text = place_type
        self.changed()

    def set_init_string(self, init_string):
        self.init.text = init_string
        self.changed()

    def get_place_type(self):
        return self.place_type.text

    def is_place(self):
        return True

    def get_trace_label_text(self):
        # Returns None when tracing is disabled (label stays empty).
        if self.trace_tokens:
            if not self.trace_tokens_functions:
                return [ "number of tokens" ]
            else:
                return [ trace_function.name for trace_function in self.trace_tokens_functions ]

    def get_verif_label_text(self):
        if self.final_marking:
            return [ "final marking" ]

    def get_simrun_label_text(self):
        return ""

    def tracing_to_xml(self):
        """Serialize the token-tracing configuration."""
        element = xml.Element("trace")
        element.set("trace-tokens", str(self.trace_tokens))
        for trace_function in self.trace_tokens_functions:
            e = xml.Element("function")
            e.set("name", trace_function.name)
            e.set("return-type", trace_function.return_type)
            element.append(e)
        return element

    def interface_as_xml(self):
        """Serialize the library interface configuration."""
        element = xml.Element("interface")
        position = self.interface.get_position()
        element.set("x", str(position[0]))
        element.set("y", str(position[1]))
        if self.interface.interface_in is not None:
            element.set("in", self.interface.interface_in)
        if self.interface.interface_out is not None:
            element.set("out", self.interface.interface_out)
        return element

    def as_xml(self):
        """Serialize the place for project files."""
        e = self.create_xml_element("place")
        position = self.box.get_position()
        e.set("x", str(position[0]))
        e.set("y", str(position[1]))
        e.set("name", str(self.box.name))
        e.set("radius", str(self.box.radius))
        e.set("sx", str(self.box.size[0]))
        e.set("sy", str(self.box.size[1]))
        e.set("label-x", str(self.label_placement.get_position()[0]))
        e.set("label-y", str(self.label_placement.get_position()[1]))
        if self.final_marking:
            element = xml.Element("verif-final-marking")
            element.text = "True"
            e.append(element)
        e.append(canvastext_to_xml(self.place_type, "place-type"))
        e.append(canvastext_to_xml(self.init, "init"))
        if self.has_code():
            e.append(self.xml_code_element())
        if self.interface.is_visible():
            e.append(self.interface_as_xml())
        e.append(self.tracing_to_xml())
        return e

    def export_xml(self, build_config):
        """Serialize the place for the build process."""
        e = self.create_xml_element("place")
        e.set("name", self.box.name)
        e.set("type", self.place_type.text)
        if build_config.verification:
            if self.final_marking:
                element = xml.Element("verif-final-marking")
                element.text = "True"
                e.append(element)
        # Library builds fill interface places from outside; skip init-expr.
        if not build_config.library or self.interface.interface_in is None:
            e.set("init-expr", self.init.text)
        if self.has_code():
            e.append(self.xml_code_element())
        if build_config.tracing and self.trace_tokens:
            e.append(self.tracing_to_xml())
        if build_config.library:
            if self.interface.interface_in is not None:
                e.set("in", self.interface.interface_in)
            if self.interface.interface_out is not None:
                e.set("out", self.interface.interface_out)
        return e

    def get_final_marking(self):
        return self.final_marking

    def set_final_marking(self, value):
        self.final_marking = value
        self.changed()
class Edge(NetItem):
    """An arc between a place and a transition, drawn as an arrow through
    optional intermediate bend points, carrying an inscription expression."""

    # Simrun option: substitute the size of data transferred over the edge.
    size_substitution = False
    size_substitution_code = ""

    def __init__(self, net, id, from_item, to_item, points):
        NetItem.__init__(self, net, id)
        self.from_item = from_item
        self.to_item = to_item
        self.points = [ citems.Point(self, "point", citems.AbsPlacement(p))
                        for p in points ]
        self.line = citems.ArrowLine(self, "line", self.get_all_points)
        self.inscription = citems.Text(self,
                                       "inscription",
                                       self.line.get_relative_placement(None),
                                       "")
        self.label_simrun = citems.SimRunLabel(self, "simrunbox",
            self.inscription.get_relative_placement((0, 18), absolute=False))
        self.label_simrun.text_fn = self.get_simrun_label_text

    def get_simrun_label_text(self):
        # Returns None when no substitution is active (label stays empty).
        if self.size_substitution:
            return "size: {0}".format(self.size_substitution_code)

    def get_size_substitution(self):
        return self.size_substitution

    def set_size_substitution(self, value):
        self.size_substitution = value
        self.changed()

    def get_size_substitution_code(self):
        return self.size_substitution_code

    def set_size_substitution_code(self, value):
        self.size_substitution_code = value
        self.changed()

    def simple_copy(self):
        """ Copy of edge that preserves topological properties:
            id, inscription, from_item and to_item """
        e = Edge(self.net, self.id, self.from_item, self.to_item, [])
        e.inscription = self.inscription
        e.size_substitution = self.size_substitution
        e.size_substitution_code = self.size_substitution_code
        return e

    def get_canvas_items(self, view_mode):
        items = [ self.line, self.inscription ] + self.points
        if view_mode == "simrun":
            items.append(self.label_simrun)
        return items

    def add_point(self, position):
        """Insert a bend point on the line segment nearest to *position*."""
        # Remember the inscription position: inserting a point changes the
        # line geometry that the inscription placement is relative to.
        inscription_position = self.inscription.get_position()
        for i, (a, b) in enumerate(utils.pairs_generator(self.get_all_points())):
            if utils.is_near_line_segment(a, b, position, 5):
                point = citems.Point(self, "point", citems.AbsPlacement(position))
                point.owner = self
                self.points.insert(i, point)
                break
        self.inscription.set_position(inscription_position)
        self.net.changed() # Canvas items changed, so self.changed() is not sufficient

    def remove_point(self, item):
        """Remove bend point *item*, keeping the inscription in place."""
        inscription_position = self.inscription.get_position()
        self.points.remove(item)
        self.inscription.set_position(inscription_position)
        self.net.changed() # Canvas items changed, so self.changed() is not sufficient

    def get_inscription(self):
        return self.inscription.text

    def set_inscription(self, inscription):
        self.inscription.text = inscription
        self.changed()

    def is_bidirectional(self):
        return self.line.bidirectional

    def toggle_bidirectional(self):
        self.line.bidirectional = not self.line.bidirectional
        self.changed()

    def export_form(self):
        """Form used during export: the place-side copy of a bidirectional
        edge drops its size substitution."""
        if self.is_bidirectional() and self.from_item.is_place():
            e = self.simple_copy()
            e.size_substitution = False
            return e
        return self

    def make_complement(self, export_form=False):
        """ This function returns exact copy of the edge with changed directions,
            This is used during splitting bidirectional edges """
        e = self.simple_copy()
        e.switch_direction()
        if export_form and e.from_item.is_place():
            e.size_substitution = False
        return e

    def get_end_points(self):
        """Return the (from, to) endpoints clipped to the item borders."""
        if self.points:
            p1 = self.points[0].get_position()
            p2 = self.points[-1].get_position()
        else:
            # Without bend points, aim at the center of the opposite item.
            p1 = utils.vector_add_t(
                self.to_item.box.get_position(), self.to_item.box.size, 0.5)
            p2 = utils.vector_add_t(
                self.from_item.box.get_position(), self.from_item.box.size, 0.5)
        return (self.from_item.box.get_border_point(p1),
                self.to_item.box.get_border_point(p2))

    def compute_insciption_point(self):
        # NOTE(review): misspelled name kept as-is; this method relies on
        # self.inscription_point / self.inscription_param, which are never
        # assigned in this file -- confirm whether this is legacy/dead code.
        points = self.get_all_points()
        if self.inscription_point < len(points) - 1:
            return utils.interpolate(points[self.inscription_point],
                                     points[self.inscription_point + 1],
                                     self.inscription_param)
        else:
            return self.points[self.inscription_point]

    def is_edge(self):
        return True

    def switch_direction(self):
        """Reverse the edge in place, keeping the inscription where it is."""
        inscription_position = self.inscription.get_position()
        i = self.from_item
        self.from_item = self.to_item
        self.to_item = i
        self.points.reverse()
        self.inscription.set_position(inscription_position)
        self.changed()

    def as_xml(self):
        """Serialize the edge for project files."""
        e = self.create_xml_element("edge")
        e.set("from_item", str(self.from_item.id))
        e.set("to_item", str(self.to_item.id))
        if self.line.bidirectional:
            e.set("bidirectional", "true")
        e.append(canvastext_to_xml(self.inscription, "inscription"))
        for point in self.points:
            pe = xml.Element("point")
            position = point.get_position()
            pe.set("x", str(position[0]))
            pe.set("y", str(position[1]))
            e.append(pe)
        if self.size_substitution:
            element = xml.Element("size-substitution")
            element.text = self.size_substitution_code
            e.append(element)
        return e

    def get_all_points(self):
        """Full polyline of the edge: border point, bends, border point."""
        sp, ep = self.get_end_points()
        return [sp] + [ p.get_position() for p in self.points ] + [ep]

    def create_xml_export_element(self, name, build_config):
        """Serialize the edge for the build process as <edge-in>/<edge-out>,
        referencing the place endpoint by id."""
        e = xml.Element(name)
        e.set("id", str(self.id))
        if self.from_item.is_place():
            e.set("place-id", str(self.from_item.get_id()))
        else:
            e.set("place-id", str(self.to_item.get_id()))
        e.set("expr", self.inscription.text)
        if build_config.substitutions and self.size_substitution:
            element = xml.Element("size-substitution")
            element.text = self.size_substitution_code
            e.append(element)
        return e
class RectItem(NetItem):
    """A rectangular net item defined by two draggable corner points."""

    def __init__(self, net, id, position, size):
        NetItem.__init__(self, net, id)
        self.point1 = citems.Point(self, "point1", citems.AbsPlacement(position))
        self.point1.action = "resize_ltop"
        self.point2 = citems.Point(self, "point2", citems.AbsPlacement(
            utils.vector_add(position, size)))
        # NOTE(review): only point2 receives an owner here; point1 is left
        # without one -- confirm whether that asymmetry is intentional.
        self.point2.owner = self
        self.point2.action = "resize_rbottom"

    def get_size(self):
        """Return (width, height) computed from the two corner points."""
        return utils.make_vector(self.point1.get_position(), self.point2.get_position())

    def is_inside(self, item):
        """True when *item*'s box position lies inside this rectangle."""
        return utils.position_inside_rect(
            item.box.get_position(),
            self.point1.get_position(),
            self.get_size())
class NetArea(RectItem):
    """A rectangular area with an init expression; places and transitions
    located inside the rectangle belong to the area."""

    def __init__(self, net, id, position, size):
        RectItem.__init__(self, net, id, position, size)
        self.area = citems.Area(self, "area", self.point1, self.point2)
        # The init expression is drawn just above the top-left corner.
        position = utils.vector_add(self.point1.get_position(), (0, -15))
        self.init = citems.Text(self, "init", self.point1.get_relative_placement(position))

    def get_canvas_items(self, view_mode):
        return [ self.point1, self.point2,
                 self.init, self.area ]

    def set_init_expr(self, init_expr):
        self.init.text = init_expr
        self.changed()

    def get_init_expr(self):
        return self.init.text

    def is_area(self):
        return True

    def as_xml(self):
        """Serialize geometry and init expression for project files."""
        e = self.create_xml_element("area")
        position = self.point1.get_position()
        size = self.get_size()
        e.set("x", str(position[0]))
        e.set("y", str(position[1]))
        e.set("sx", str(size[0]))
        e.set("sy", str(size[1]))
        e.append(canvastext_to_xml(self.init, "init"))
        return e

    def export_xml(self):
        """Serialize the area for the build process, listing the ids of the
        places it contains."""
        e = self.create_xml_element("area")
        e.set("init-expr", self.init.text)
        e.set("name", "")
        # Reuse places() instead of duplicating the containment filter.
        for item in self.places():
            element = xml.Element("place")
            element.set("id", str(item.id))
            e.append(element)
        return e

    def places(self):
        """Places of the net lying inside this area."""
        return [ place for place in self.net.places() if self.is_inside(place) ]

    def transitions(self):
        """Transitions of the net lying inside this area."""
        return [ transition for transition in self.net.transitions()
                 if self.is_inside(transition) ]
class BasicLoader:
    """Loader that keeps the ids stored in the XML, bumping the project id
    counter so freshly generated ids can never collide with loaded ones."""

    def __init__(self, project):
        self.project = project

    def get_id(self, element):
        """Read the element's id and keep the project counter ahead of it."""
        loaded = utils.xml_int(element, "id", 0)
        self.project.id_counter = max(self.project.id_counter, loaded)
        return loaded

    def translate_id(self, id):
        # Ids are preserved, so translation is the identity.
        return id
class NewIdLoader:
    """Loader that assigns fresh project ids, remembering the mapping from
    original ids so that cross-references can be translated afterwards."""

    def __init__(self, project):
        self.project = project
        self.idtable = {}

    def get_id(self, element):
        """Read the element's id and replace it with a fresh project id."""
        original = utils.xml_int(element, "id", 0)
        fresh = self.project.new_id()
        self.idtable[original] = fresh
        return fresh

    def translate_id(self, id):
        # Raises KeyError for ids that were never loaded.
        return self.idtable[id]
def canvastext_to_xml(obj, name):
    """Serialize a canvas text item into an XML element named *name*."""
    x, y = obj.get_position()
    element = xml.Element(name)
    element.set("x", str(x))
    element.set("y", str(y))
    element.text = obj.text
    return element
def canvastext_from_xml(element, obj):
    """Restore a canvas text item's position and text from *element*."""
    obj.set_position((utils.xml_int(element, "x"),
                      utils.xml_int(element, "y")))
    # Empty XML elements deserialize with text None; normalize to "".
    obj.text = element.text if element.text is not None else ""
def load_code(element):
    """Return the source code stored in a <code> child, or "" if absent.

    An empty <code/> element has text None; it is normalized to "" so
    callers such as NetElement.set_code() can safely call .strip() on the
    result.  The element is looked up only once.
    """
    code = element.find("code")
    if code is None or code.text is None:
        return ""
    return code.text
def load_place_tracing(element, place):
    """Apply a <trace> element to *place*; a missing element keeps defaults."""
    if element is None:
        return
    place.trace_tokens = utils.xml_bool(element, "trace-tokens", False)
    for function_element in element.findall("function"):
        place.trace_tokens_functions.append(
            tracing.TraceFunction(function_element.get("name"),
                                  function_element.get("return-type")))
def load_place(element, net, loader):
    """Create a Place in *net* from its XML description.

    Supports both the current child-element format and the legacy
    attribute-based format for place type and init expression.
    """
    id = loader.get_id(element)
    place = net.add_place((xml_int(element,"x"), xml_int(element, "y")), id)
    place.box.name = xml_str(element, "name", "")
    place.box.radius = xml_int(element,"radius")
    place.box.size = (xml_int(element,"sx", 0), xml_int(element,"sy", 0))
    # Older files have no explicit label position; keep the default then.
    if element.get("label-x") and element.get("label-y"):
        label_x = xml_int(element, "label-x")
        label_y = xml_int(element, "label-y")
        place.label_placement.set_position((label_x, label_y))
    if element.find("place-type") is not None:
        canvastext_from_xml(element.find("place-type"), place.place_type)
    else:
        place.place_type.text = element.get("place_type", "") # Backward compatibility
    if element.find("init") is not None:
        canvastext_from_xml(element.find("init"), place.init)
    else:
        place.init.text = element.get("init_string", "") # Backward compatibility
    place.set_code(load_code(element))
    load_place_tracing(element.find("trace"), place)
    interface = element.find("interface")
    if interface is not None:
        place.interface.set_position((xml_int(interface, "x"),
                                      xml_int(interface, "y")))
        place.interface.interface_in = interface.get("in")
        place.interface.interface_out = interface.get("out")
        place.interface.update()
    e = element.find("verif-final-marking")
    if e is not None:
        # NOTE(review): bool(e.text) is True for ANY non-empty text
        # (including "False"); this works only because the element is
        # written solely when final marking is enabled -- confirm before
        # changing the serialization.
        place.final_marking = bool(e.text)
def load_transition(element, net, loader):
    """Create a Transition in *net* from its XML description.

    Supports both the current child-element format and the legacy
    attribute-based guard format.
    """
    id = loader.get_id(element)
    transition = net.add_transition((xml_int(element,"x"), xml_int(element, "y")), id)
    sx = xml_int(element,"sx")
    sy = xml_int(element,"sy")
    # Older files have no explicit label position; keep the default then.
    if element.get("label-x") and element.get("label-y"):
        label_x = xml_int(element, "label-x")
        label_y = xml_int(element, "label-y")
        transition.label_placement.set_position((label_x, label_y))
    transition.box.size = (sx, sy)
    transition.box.name = xml_str(element,"name", "")
    if element.find("guard") is not None:
        canvastext_from_xml(element.find("guard"), transition.guard)
    else:
        transition.guard.text = element.get("guard", "") # Backward compatibility
    if element.find("root") is not None:
        canvastext_from_xml(element.find("root"), transition.root)
    transition.set_code(load_code(element))
    # The mere presence of a <trace> element enables fire tracing.
    transition.trace_fire = element.find("trace") is not None
    transition.clock = utils.xml_bool(element, "clock", False)
    transition.set_collective(utils.xml_bool(element, "collective", False))
    if element.find("time-substitution") is not None:
        transition.time_substitution = True
        transition.time_substitution_code = element.find("time-substitution").text
    if element.find("clock-substitution") is not None:
        transition.clock_substitution = True
        transition.clock_substitution_code = element.find("clock-substitution").text
    e = element.find("verif-occurrence")
    if e is not None:
        transition.occurrence_analysis = True
        transition.occurrence_analysis_compare_process = utils.xml_bool(e, "process")
        transition.occurrence_analysis_compare_binding = utils.xml_bool(e, "binding")
    transition.set_priority(element.get("priority", ""))
def load_edge(element, net, loader):
    """Create an Edge in *net* from its XML description.

    Endpoint ids are translated through *loader* so copied nets (which
    receive fresh ids) stay internally consistent.  Endpoints must have
    been loaded before the edges.
    """
    id = loader.get_id(element)
    fitem = net.item_by_id(loader.translate_id(xml_int(element, "from_item")))
    assert fitem is not None
    titem = net.item_by_id(loader.translate_id(xml_int(element, "to_item")))
    assert titem is not None
    points = [ (xml_int(e, "x"), xml_int(e,"y")) for e in element.findall("point") ]
    edge = net.add_edge(fitem, titem, points, id)
    edge.line.bidirectional = utils.xml_bool(element, "bidirectional", False)
    if element.find("inscription") is not None:
        canvastext_from_xml(element.find("inscription"), edge.inscription)
    else: # Backward compatibility: inscription as an attribute
        if element.get("inscription") is not None:
            edge.inscription.text = xml_str(element, "inscription")
    if element.find("size-substitution") is not None:
        edge.size_substitution = True
        edge.size_substitution_code = element.find("size-substitution").text
def load_area(element, net, loader):
    """Create a NetArea in *net* from its XML description."""
    area_id = loader.get_id(element)
    size = (xml_int(element, "sx"), xml_int(element, "sy"))
    position = (xml_int(element, "x"), xml_int(element, "y"))
    area = net.add_area(position, size, area_id)
    init = element.find("init")
    if init is not None:
        canvastext_from_xml(init, area.init)
    else:
        # Backward compatibility with the old attribute-based format.
        area.init.text = xml_str(element, "init-expr", "")
def load_net(element, project, loader):
    """Build a Net from an XML element, loading contained items in
    dependency order: areas, places, transitions, then edges (edges need
    their endpoints to exist already)."""
    # Old project files may lack a name attribute; default to "Main".
    name = element.get("name", "Main")
    net = Net(project, name, loader.get_id(element))
    for tag, load in (("area", load_area),
                      ("place", load_place),
                      ("transition", load_transition),
                      ("edge", load_edge)):
        for child in element.findall(tag):
            load(child, net, loader)
    return net
| Kobzol/kaira | gui/net.py | Python | gpl-3.0 | 39,085 |
#!/usr/bin/env python2.7
"""Print the collocations of NLTK's inaugural-address (text4) and chat
(text8) corpora, separated by a blank line."""
from nltk.book import *

# Text.collocations() prints its results itself and returns None, so
# wrapping it in ``print`` would emit a spurious "None" line.
text4.collocations()
print
text8.collocations()
#
# Copyright 2017 CNIT - Consorzio Nazionale Interuniversitario per le Telecomunicazioni
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import logging
import copy
from lib.rdcl_graph import RdclGraph
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('OshiRdclGraph')
class OshiRdclGraph(RdclGraph):
    """Operates on the graph representation used for the GUI graph views.

    Fix: the methods below used mutable default arguments (``model={}``,
    ``positions={}``), which are created once and shared across calls, so a
    caller mutating the returned structure could leak state into later calls.
    ``None`` sentinels are used instead; passing ``{}`` explicitly still works,
    so the change is backward-compatible.
    """

    def __init__(self):
        pass

    def build_graph_from_project(self, json_project, model=None):
        """Create a graph for a whole project.

        Not supported for OSHI, so an empty vertex/edge structure is returned.

        :param json_project: project descriptor (currently unused)
        :param model: optional model dict attached to the result
        :return: dict with 'vertices', 'edges', 'graph_parameters', 'model'
        """
        graph_object = {
            'vertices': [],
            'edges': [],
            'graph_parameters': {},
            'model': {} if model is None else model,
        }
        return graph_object

    def build_graph_from_oshi_descriptor(self, json_data, positions=None, model=None):
        """Create a single graph for an OSHI descriptor.

        :param json_data: descriptor already shaped like a graph object
        :param positions: optional {'vertices': {id: {'x':..,'y':..}}} layout;
            matching nodes get fixed 'fx'/'fy' coordinates
        :param model: optional model dict attached to the result
        :raises: re-raises any error after logging it
        """
        positions = {} if positions is None else positions
        model = {} if model is None else model
        try:
            graph_object = json_data
            node_positions = positions.get('vertices', {})
            for node in graph_object['vertices']:
                pos = node_positions.get(node['id'])
                if pos is not None and 'x' in pos and 'y' in pos:
                    node['fx'] = pos['x']
                    node['fy'] = pos['y']
            graph_object['model'] = model
        except Exception as e:
            log.exception('Exception in build_graph_from_project')
            raise
        return graph_object
| superfluidity/RDCL3D | code/lib/oshi/oshi_rdcl_graph.py | Python | apache-2.0 | 2,216 |
"""engine.SCons.Variables.ListVariable
This file defines the option type for SCons implementing 'lists'.
A 'list' option may either be 'all', 'none' or a list of names
separated by comma. After the option has been processed, the option
value holds either the named list elements, all list elemens or no
list elements at all.
Usage example:
list_of_libs = Split('x11 gl qt ical')
opts = Variables()
opts.Add(ListVariable('shared',
'libraries to build as shared libraries',
'all',
elems = list_of_libs))
...
for lib in list_of_libs:
if lib in env['shared']:
env.SharedObject(...)
else:
env.Object(...)
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/ListVariable.py 2009/09/04 16:33:07 david"
# Know Bug: This should behave like a Set-Type, but does not really,
# since elements can occur twice.
__all__ = ['ListVariable',]
import string
import UserList
import SCons.Util
class _ListVariable(UserList.UserList):
    """Value type for a ListVariable option.

    Holds the selected element names plus the sorted list of all allowed
    elements, so __str__ can render the canonical 'all'/'none' forms.
    Ordering/equality comparisons are deliberately disabled (set-like
    semantics are intended, but duplicates can still occur).
    """
    def __init__(self, initlist=[], allowedElems=[]):
        # filter(None, ...) drops empty names; allowedElems is copied so the
        # caller's list is never mutated by the sort below.
        UserList.UserList.__init__(self, filter(None, initlist))
        self.allowedElems = allowedElems[:]
        self.allowedElems.sort()
    def __cmp__(self, other):
        raise NotImplementedError
    def __eq__(self, other):
        raise NotImplementedError
    def __ge__(self, other):
        raise NotImplementedError
    def __gt__(self, other):
        raise NotImplementedError
    def __le__(self, other):
        raise NotImplementedError
    def __lt__(self, other):
        raise NotImplementedError
    def __str__(self):
        # Canonical rendering: 'none', 'all', or a comma-separated name list.
        if len(self) == 0:
            return 'none'
        self.data.sort()
        if self.data == self.allowedElems:
            return 'all'
        else:
            return string.join(self, ',')
    def prepare_to_store(self):
        # Used by SCons when persisting option values to a file.
        return self.__str__()
def _converter(val, allowedElems, mapdict):
    """Convert the raw option string *val* into a _ListVariable.

    'none' -> empty list, 'all' -> every allowed element, otherwise a
    comma-separated list of names.  Names are first translated through
    *mapdict* (aliases), then validated against *allowedElems*.
    Raises ValueError listing any names that are not allowed.
    """
    if val == 'none':
        val = []
    elif val == 'all':
        val = allowedElems
    else:
        val = filter(None, string.split(val, ','))
        val = map(lambda v, m=mapdict: m.get(v, v), val)
        notAllowed = filter(lambda v, aE=allowedElems: not v in aE, val)
        if notAllowed:
            raise ValueError("Invalid value(s) for option: %s" %
                             string.join(notAllowed, ','))
    return _ListVariable(val, allowedElems)
## def _validator(key, val, env):
##     """
##     """
##     # todo: write validator for package list
##     return 1
def ListVariable(key, help, default, names, map={}):
    """
    The input parameters describe a 'package list' option, thus they
    are returned with the correct converter and validater appended. The
    result is usable for input to opts.Add() .

    A 'package list' option may either be 'all', 'none' or a list of
    package names (separated by space).

    *map* optionally translates user-supplied aliases to canonical names
    before validation (it is read-only here, so the shared default is safe).
    """
    names_str = 'allowed names: %s' % string.join(names, ' ')
    if SCons.Util.is_List(default):
        default = string.join(default, ',')
    help = string.join(
        (help, '(all|none|comma-separated list of names)', names_str),
        '\n    ')
    return (key, help, default,
            None, #_validator,
            lambda val, elems=names, m=map: _converter(val, elems, m))
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| cournape/numscons | numscons/scons-local/scons-local-1.2.0/SCons/Variables/ListVariable.py | Python | bsd-3-clause | 4,560 |
# Copyright (C) 2002-2005 Stephen Kennedy <stevek@gnome.org>
# Copyright (C) 2011-2013, 2015 Kai Willadsen <kai.willadsen@gmail.com>
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import errno
import glob
import os
import xml.etree.ElementTree as ElementTree
from meld.conf import _
from . import _vc
#: Simple enum constants for differentiating conflict cases.
CONFLICT_TYPE_MERGE, CONFLICT_TYPE_UPDATE = 1, 2
class Vc(_vc.Vc):
    """Subversion support for meld's version-control browser."""
    CMD = "svn"
    NAME = "Subversion"
    VC_DIR = ".svn"
    # Map from svn's XML status "item" values to meld's internal file states.
    state_map = {
        "unversioned": _vc.STATE_NONE,
        "added": _vc.STATE_NEW,
        "normal": _vc.STATE_NORMAL,
        "missing": _vc.STATE_MISSING,
        "ignored": _vc.STATE_IGNORED,
        "modified": _vc.STATE_MODIFIED,
        "deleted": _vc.STATE_REMOVED,
        "conflicted": _vc.STATE_CONFLICT,
    }
    def commit(self, runner, files, message):
        """Commit *files* with commit *message* via the supplied runner."""
        command = [self.CMD, 'commit', '-m', message]
        runner(command, files, refresh=True, working_dir=self.root)
    def update(self, runner):
        """Update the whole working copy."""
        command = [self.CMD, 'update']
        runner(command, [], refresh=True, working_dir=self.root)
    def remove(self, runner, files):
        """Remove *files* from version control (and disk, --force)."""
        command = [self.CMD, 'rm', '--force']
        runner(command, files, refresh=True, working_dir=self.root)
    def revert(self, runner, files):
        """Discard local modifications to *files*."""
        command = [self.CMD, 'revert']
        runner(command, files, refresh=True, working_dir=self.root)
    def resolve(self, runner, files):
        """Mark conflicts on *files* resolved, keeping the working version."""
        command = [self.CMD, 'resolve', '--accept=working']
        runner(command, files, refresh=True, working_dir=self.root)
    def get_path_for_repo_file(self, path, commit=None):
        """Return a temp-file path holding the repository (BASE) version of
        *path*; only the BASE revision is supported (commit must be None)."""
        if commit is None:
            commit = "BASE"
        else:
            raise NotImplementedError()
        if not path.startswith(self.root + os.path.sep):
            raise _vc.InvalidVCPath(self, path, "Path not in repository")
        # Make the path repository-relative for `svn cat`
        path = path[len(self.root) + 1:]
        suffix = os.path.splitext(path)[1]
        args = [self.CMD, "cat", "-r", commit, path]
        return _vc.call_temp_output(args, cwd=self.root, suffix=suffix)
    def get_path_for_conflict(self, path, conflict=None):
        """
        SVN has two types of conflicts:
        Merge conflicts, which give 3 files:
           .left.r* (THIS)
           .working (BASE... although this is a bit debatable)
           .right.r* (OTHER)
        Update conflicts which give 3 files:
           .mine (THIS)
           .r* (lower - BASE)
           .r* (higher - OTHER)

        Returns a (filename, is_temporary) tuple.
        """
        if not path.startswith(self.root + os.path.sep):
            raise _vc.InvalidVCPath(self, path, "Path not in repository")
        # If this is merged, we just return the merged output
        if conflict == _vc.CONFLICT_MERGED:
            return path, False
        # First find what type of conflict this is by looking at the base
        # we can possibly return straight away!
        conflict_type = None
        base = glob.glob('%s.working' % path)
        if len(base) == 1:
            # We have a merge conflict
            conflict_type = CONFLICT_TYPE_MERGE
        else:
            base = glob.glob('%s.mine' % path)
            if len(base) == 1:
                # We have an update conflict
                conflict_type = CONFLICT_TYPE_UPDATE
        if conflict_type is None:
            raise _vc.InvalidVCPath(self, path, "No known conflict type found")
        if conflict == _vc.CONFLICT_BASE:
            return base[0], False
        elif conflict == _vc.CONFLICT_THIS:
            if conflict_type == CONFLICT_TYPE_MERGE:
                return glob.glob('%s.merge-left.r*' % path)[0], False
            else:
                return glob.glob('%s.r*' % path)[0], False
        elif conflict == _vc.CONFLICT_OTHER:
            if conflict_type == CONFLICT_TYPE_MERGE:
                return glob.glob('%s.merge-right.r*' % path)[0], False
            else:
                # [-1] picks the highest revision (glob results are sorted)
                return glob.glob('%s.r*' % path)[-1], False
        raise KeyError("Conflict file does not exist")
    def add(self, runner, files):
        # SVN < 1.7 needs to add folders from their immediate parent
        dirs = [s for s in files if os.path.isdir(s)]
        files = [s for s in files if os.path.isfile(s)]
        command = [self.CMD, 'add']
        for path in dirs:
            runner(command, [path], refresh=True,
                   working_dir=os.path.dirname(path))
        if files:
            runner(command, files, refresh=True, working_dir=self.location)
    @classmethod
    def _repo_version_support(cls, version):
        # Working-copy format 12 corresponds to SVN 1.7's single wc.db layout
        return version >= 12
    @classmethod
    def valid_repo(cls, path):
        """Return True if *path* is inside a supported SVN working copy."""
        if _vc.call([cls.CMD, "info"], cwd=path):
            return False
        root, location = cls.is_in_repo(path)
        vc_dir = os.path.join(root, cls.VC_DIR)
        # Check for repository version, trusting format file then entries file
        repo_version = None
        for filename in ("format", "entries"):
            path = os.path.join(vc_dir, filename)
            if os.path.exists(path):
                with open(path) as f:
                    repo_version = int(f.readline().strip())
                break
        if not repo_version and os.path.exists(os.path.join(vc_dir, "wc.db")):
            repo_version = 12
        return cls._repo_version_support(repo_version)
    def _update_tree_state_cache(self, path):
        """Parse `svn status --xml` output and refresh the state caches."""
        # Retry on EAGAIN (interrupted pipe read); any other OSError is fatal.
        while 1:
            try:
                # "svn --xml" outputs utf8, even with Windows non-utf8 locale
                proc = _vc.popen(
                    [self.CMD, "status", "-v", "--xml", path],
                    cwd=self.location, use_locale_encoding=False)
                tree = ElementTree.parse(proc)
                break
            except OSError as e:
                if e.errno != errno.EAGAIN:
                    raise
        for target in tree.findall("target") + tree.findall("changelist"):
            for entry in target.iter(tag="entry"):
                path = entry.attrib["path"]
                if not path:
                    continue
                if not os.path.isabs(path):
                    path = os.path.abspath(os.path.join(self.location, path))
                for status in entry.iter(tag="wc-status"):
                    item = status.attrib["item"]
                    if item == "":
                        continue
                    state = self.state_map.get(item, _vc.STATE_NONE)
                    self._tree_cache[path] = state
                    rev = status.attrib.get("revision")
                    rev_label = _("Rev %s") % rev if rev is not None else ''
                    self._tree_meta_cache[path] = rev_label
                    self._add_missing_cache_entry(path, state)
| yousseb/meld | meld/vc/svn.py | Python | gpl-2.0 | 7,923 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import errno
import logging
import os
import random
import shutil
import string
import time
from StringIO import StringIO
import psycopg2
import openerp
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.osv.orm import except_orm
import openerp.report.interface
from openerp.tools.misc import ustr
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
from content_index import cntIndex
_logger = logging.getLogger(__name__)
class document_file(osv.osv):
    """Extends ir.attachment with document-management fields (owner,
    directory, indexed content, partner) and directory-aware access rules."""
    _inherit = 'ir.attachment'
    _columns = {
        # Columns from ir.attachment:
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        # Fields of document:
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        'parent_id': fields.many2one('document.directory', 'Directory', select=1, change_default=True),
        'index_content': fields.text('Indexed Content'),
        'partner_id':fields.many2one('res.partner', 'Partner', select=1),
        'file_type': fields.char('Content Type'),
    }
    _order = "id desc"
    _defaults = {
        'user_id': lambda self, cr, uid, ctx:uid,
    }
    _sql_constraints = [
        ('filename_unique', 'unique (name,parent_id)', 'The filename must be unique in a directory !'),
    ]
    def check(self, cr, uid, ids, mode, context=None, values=None):
        """Overwrite check to verify access on directory to validate specifications of doc/access_permissions.rst"""
        if not isinstance(ids, list):
            ids = [ids]
        super(document_file, self).check(cr, uid, ids, mode, context=context, values=values)
        if ids:
            self.pool.get('ir.model.access').check(cr, uid, 'document.directory', mode)
            # use SQL to avoid recursive loop on read
            cr.execute('SELECT DISTINCT parent_id from ir_attachment WHERE id in %s AND parent_id is not NULL', (tuple(ids),))
            self.pool.get('document.directory').check_access_rule(cr, uid, [parent_id for (parent_id,) in cr.fetchall()], mode, context=context)
    def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
        """Search attachments, then drop those whose directory is not
        readable by the user (returns a count when count=True)."""
        # Grab ids, bypassing 'count'
        ids = super(document_file, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=False)
        if not ids:
            return 0 if count else []
        # Filter out documents that are in directories that the user is not allowed to read.
        # Must use pure SQL to avoid access rules exceptions (we want to remove the records,
        # not fail), and the records have been filtered in parent's search() anyway.
        cr.execute('SELECT id, parent_id from ir_attachment WHERE id in %s', (tuple(ids),))
        # cont a dict of parent -> attach
        parents = {}
        for attach_id, attach_parent in cr.fetchall():
            parents.setdefault(attach_parent, []).append(attach_id)
        parent_ids = parents.keys()
        # filter parents
        visible_parent_ids = self.pool.get('document.directory').search(cr, uid, [('id', 'in', list(parent_ids))])
        # null parents means allowed
        ids = parents.get(None,[])
        for parent_id in visible_parent_ids:
            ids.extend(parents[parent_id])
        return len(ids) if count else ids
    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate an attachment, renaming it "<name> (copy)" so the
        per-directory unique-name constraint is not violated."""
        if not default:
            default = {}
        if 'name' not in default:
            name = self.read(cr, uid, [id], ['name'])[0]['name']
            default.update(name=_("%s (copy)") % (name))
        return super(document_file, self).copy(cr, uid, id, default, context=context)
    def create(self, cr, uid, vals, context=None):
        """Create an attachment; derives parent directory from context,
        partner from the related record, and indexes the file content."""
        if context is None:
            context = {}
        vals['parent_id'] = context.get('parent_id', False) or vals.get('parent_id', False)
        # take partner from uid
        if vals.get('res_id', False) and vals.get('res_model', False) and not vals.get('partner_id', False):
            vals['partner_id'] = self.__get_partner_id(cr, uid, vals['res_model'], vals['res_id'], context)
        if vals.get('datas', False):
            vals['file_type'], vals['index_content'] = self._index(cr, uid, vals['datas'].decode('base64'), vals.get('datas_fname', False), None)
        return super(document_file, self).create(cr, uid, vals, context)
    def write(self, cr, uid, ids, vals, context=None):
        """Write attachment values; re-indexes content when 'datas' changes."""
        if context is None:
            context = {}
        if vals.get('datas', False):
            vals['file_type'], vals['index_content'] = self._index(cr, uid, vals['datas'].decode('base64'), vals.get('datas_fname', False), None)
        return super(document_file, self).write(cr, uid, ids, vals, context)
    def _index(self, cr, uid, data, datas_fname, file_type):
        """Run the content indexer; returns (mime_type, unicode_index_text)."""
        mime, icont = cntIndex.doIndex(data, datas_fname, file_type or None, None)
        icont_u = ustr(icont)
        return mime, icont_u
    def __get_partner_id(self, cr, uid, res_model, res_id, context=None):
        """ A helper to retrieve the associated partner from any res_model+id
            It is a hack that will try to discover if the mentioned record is
            clearly associated with a partner record.
        """
        obj_model = self.pool[res_model]
        if obj_model._name == 'res.partner':
            return res_id
        elif 'partner_id' in obj_model._columns and obj_model._columns['partner_id']._obj == 'res.partner':
            bro = obj_model.browse(cr, uid, res_id, context=context)
            return bro.partner_id.id
        return False
class document_directory(osv.osv):
    """Document directory: either a static folder or a dynamic folder set
    generated per record of a linked resource model."""
    _name = 'document.directory'
    _description = 'Directory'
    _order = 'name'
    _columns = {
        'name': fields.char('Name', required=True, select=1),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid':  fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid':  fields.many2one('res.users', 'Creator', readonly=True),
        'user_id': fields.many2one('res.users', 'Owner'),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Parent Directory', select=1, change_default=True),
        'child_ids': fields.one2many('document.directory', 'parent_id', 'Children'),
        'file_ids': fields.one2many('ir.attachment', 'parent_id', 'Files'),
        'content_ids': fields.one2many('document.directory.content', 'directory_id', 'Virtual Files'),
        'type': fields.selection([ ('directory','Static Directory'), ('ressource','Folders per resource'), ],
            'Type', required=True, select=1, change_default=True,
            help="Each directory can either have the type Static or be linked to another resource. A static directory, as with Operating Systems, is the classic directory that can contain a set of files. The directories linked to systems resources automatically possess sub-directories for each of resource types defined in the parent directory."),
        'domain': fields.char('Domain', help="Use a domain if you want to apply an automatic filter on visible resources."),
        'ressource_type_id': fields.many2one('ir.model', 'Resource model', change_default=True,
            help="Select an object here and there will be one folder per record of that resource."),
        'resource_field': fields.many2one('ir.model.fields', 'Name field', help='Field to be used as name on resource directories. If empty, the "name" will be used.'),
        'resource_find_all': fields.boolean('Find all resources',
                help="If true, all attachments that match this resource will " \
                    " be located. If false, only ones that have this as parent." ),
        'ressource_parent_type_id': fields.many2one('ir.model', 'Parent Model', change_default=True,
            help="If you put an object here, this directory template will appear bellow all of these objects. " \
                 "Such directories are \"attached\" to the specific model or record, just like attachments. " \
                 "Don't put a parent directory if you select a parent model."),
        'ressource_id': fields.integer('Resource ID',
            help="Along with Parent Model, this ID attaches this folder to a specific record of Parent Model."),
        'ressource_tree': fields.boolean('Tree Structure',
            help="Check this if you want to use the same tree structure as the object selected in the system."),
        'dctx_ids': fields.one2many('document.directory.dctx', 'dir_id', 'Context fields'),
        'company_id': fields.many2one('res.company', 'Company', change_default=True),
    }
    _defaults = {
        'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'document.directory', context=c),
        'user_id': lambda self,cr,uid,ctx: uid,
        'domain': '[]',
        'type': 'directory',
        'ressource_id': 0,
        'resource_find_all': True,
    }
    _sql_constraints = [
        ('dirname_uniq', 'unique (name,parent_id,ressource_id,ressource_parent_type_id)', 'The directory name must be unique !'),
        ('no_selfparent', 'check(parent_id <> id)', 'Directory cannot be parent of itself!'),
    ]
    def name_get(self, cr, uid, ids, context=None):
        """Display directories as their slash-separated path from the root."""
        res = []
        if not self.search(cr,uid,[('id','in',ids)]):
            ids = []
        for d in self.browse(cr, uid, ids, context=context):
            s = ''
            d2 = d
            # Walk up the parent chain, prepending each ancestor name
            while d2 and d2.parent_id:
                s = d2.name + (s and ('/' + s) or '')
                d2 = d2.parent_id
            res.append((d.id, s or d.name))
        return res
    def get_full_path(self, cr, uid, dir_id, context=None):
        """ Return the full path to this directory, in a list, root first
        """
        if isinstance(dir_id, (tuple, list)):
            assert len(dir_id) == 1
            dir_id = dir_id[0]
        def _parent(dir_id, path):
            # Recurse to the root first so names are appended root-first;
            # stops at directories attached to a parent model.
            parent=self.browse(cr, uid, dir_id)
            if parent.parent_id and not parent.ressource_parent_type_id:
                _parent(parent.parent_id.id,path)
                path.append(parent.name)
            else:
                path.append(parent.name)
                return path
        path = []
        _parent(dir_id, path)
        return path
    def _check_recursion(self, cr, uid, ids, context=None):
        """Constraint helper: False if a parent_id cycle is detected
        (bounded walk of at most 100 levels)."""
        level = 100
        while len(ids):
            cr.execute('select distinct parent_id from document_directory where id in ('+','.join(map(str,ids))+')')
            ids = filter(None, map(lambda x:x[0], cr.fetchall()))
            if not level:
                return False
            level -= 1
        return True
    _constraints = [
        (_check_recursion, 'Error! You cannot create recursive directories.', ['parent_id'])
    ]
    def onchange_content_id(self, cr, uid, ids, ressource_type_id):
        # No client-side reaction needed; kept for view compatibility.
        return {}
    def get_object(self, cr, uid, uri, context=None):
        """ Return a node object for the given uri.
           This fn merely passes the call to node_context
        """
        return get_node_context(cr, uid, context).get_uri(cr, uri)
    def get_node_class(self, cr, uid, ids, dbro=None, dynamic=False, context=None):
        """Retrieve the class of nodes for this directory

           This function can be overriden by inherited classes ;)
           @param dbro The browse object, if caller already has it
        """
        if dbro is None:
            dbro = self.browse(cr, uid, ids, context=context)
        if dynamic:
            return node_res_obj
        elif dbro.type == 'directory':
            return node_dir
        elif dbro.type == 'ressource':
            return node_res_dir
        else:
            raise ValueError("dir node for %s type.", dbro.type)
    def _prepare_context(self, cr, uid, nctx, context=None):
        """ Fill nctx with properties for this database
        @param nctx instance of nodes.node_context, to be filled
        @param context ORM context (dict) for us

        Note that this function is called *without* a list of ids,
        it should behave the same for the whole database (based on the
        ORM instance of document.directory).

        Some databases may override this and attach properties to the
        node_context. See WebDAV, CalDAV.
        """
        return
    def get_dir_permissions(self, cr, uid, ids, context=None):
        """Check what permission user 'uid' has on directory 'id'
        """
        assert len(ids) == 1
        res = 0
        # Permission bits: read=5, write=2, unlink=8 (OR-ed together)
        for pperms in [('read', 5), ('write', 2), ('unlink', 8)]:
            try:
                self.check_access_rule(cr, uid, ids, pperms[0], context=context)
                res |= pperms[1]
            except except_orm:
                pass
        return res
    def _locate_child(self, cr, uid, root_id, uri, nparent, ncontext):
        """ try to locate the node in uri,
            Return a tuple (node_dir, remaining_path)
        """
        return (node_database(context=ncontext), uri)
    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a directory as "<name> (copy)" to satisfy the
        unique-name SQL constraint."""
        if not default:
            default ={}
        name = self.read(cr, uid, [id])[0]['name']
        default.update(name=_("%s (copy)") % (name))
        return super(document_directory,self).copy(cr, uid, id, default, context=context)
    def _check_duplication(self, cr, uid, vals, ids=None, op='create'):
        """Return False when another directory with the same (name, parent,
        resource parent type, resource id) already exists."""
        name=vals.get('name',False)
        parent_id=vals.get('parent_id',False)
        ressource_parent_type_id=vals.get('ressource_parent_type_id',False)
        ressource_id=vals.get('ressource_id',0)
        if op=='write':
            for directory in self.browse(cr, SUPERUSER_ID, ids):
                if not name:
                    name=directory.name
                if not parent_id:
                    parent_id=directory.parent_id and directory.parent_id.id or False
                # TODO fix algo
                if not ressource_parent_type_id:
                    ressource_parent_type_id=directory.ressource_parent_type_id and directory.ressource_parent_type_id.id or False
                if not ressource_id:
                    ressource_id=directory.ressource_id and directory.ressource_id or 0
                res=self.search(cr,uid,[('id','<>',directory.id),('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])
                if len(res):
                    return False
        if op=='create':
            res = self.search(cr, SUPERUSER_ID, [('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])
            if len(res):
                return False
        return True
    def write(self, cr, uid, ids, vals, context=None):
        """Write values after enforcing directory-name uniqueness."""
        if not self._check_duplication(cr, uid, vals, ids, op='write'):
            raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
        return super(document_directory,self).write(cr, uid, ids, vals, context=context)
    def create(self, cr, uid, vals, context=None):
        """Create a directory; rejects duplicate names and names containing
        characters reserved by the virtual filesystem (/ @ $ #)."""
        if not self._check_duplication(cr, uid, vals):
            raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
        newname = vals.get('name',False)
        if newname:
            for illeg in ('/', '@', '$', '#'):
                if illeg in newname:
                    raise osv.except_osv(_('ValidateError'), _('Directory name contains special characters!'))
        return super(document_directory,self).create(cr, uid, vals, context)
class document_directory_dctx(osv.osv):
    """ In order to evaluate dynamic folders, child items could have a limiting
        domain expression. For that, their parents will export a context where useful
        information will be passed on.
        If you define sth like "s_id" = "this.id" at a folder iterating over sales, its
        children could have a domain like [('sale_id', = ,s_id )]
        This system should be used recursively, that is, parent dynamic context will be
        appended to all children down the tree.
    """
    _name = 'document.directory.dctx'
    _description = 'Directory Dynamic Context'
    _columns = {
        # Directory that exports this context entry; removed with it.
        'dir_id': fields.many2one('document.directory', 'Directory', required=True, ondelete="cascade"),
        'field': fields.char('Field', required=True, select=1, help="The name of the field."),
        'expr': fields.char('Expression', required=True, help="A python expression used to evaluate the field.\n" + \
                "You can use 'dir_id' for current dir, 'res_id', 'res_model' as a reference to the current record, in dynamic folders"),
    }
class document_directory_content_type(osv.osv):
    """Registry of virtual-file content types (extension + mime type)."""
    _name = 'document.directory.content.type'
    _description = 'Directory Content Type'
    _columns = {
        'name': fields.char('Content Type', required=True),
        'code': fields.char('Extension', size=4),
        'active': fields.boolean('Active'),
        'mimetype': fields.char('Mime Type')
    }
    _defaults = {
        'active': lambda *args: 1
    }
class document_directory_content(osv.osv):
    """Virtual files shown inside a directory, typically PDF renderings of a
    report attached to the directory's records.

    Fix: ``_file_get`` used ``if tname.find('/'):`` to decide whether to
    sanitise slashes.  ``str.find`` returns -1 (truthy) when '/' is absent and
    0 (falsy) when it is the *first* character, so names starting with '/'
    were never sanitised.  Replaced with a membership test.
    """
    _name = 'document.directory.content'
    _description = 'Directory Content'
    _order = "sequence"

    def _extension_get(self, cr, uid, context=None):
        """Selection helper: list (code, name) of active content types."""
        cr.execute('select code,name from document_directory_content_type where active')
        res = cr.fetchall()
        return res

    _columns = {
        'name': fields.char('Content Name', required=True),
        'sequence': fields.integer('Sequence', size=16),
        'prefix': fields.char('Prefix', size=16),
        'suffix': fields.char('Suffix', size=16),
        'report_id': fields.many2one('ir.actions.report.xml', 'Report'),
        'extension': fields.selection(_extension_get, 'Document Type', required=True, size=4),
        'include_name': fields.boolean('Include Record Name', \
                help="Check this field if you want that the name of the file to contain the record name." \
                    "\nIf set, the directory will have to be a resource one."),
        'directory_id': fields.many2one('document.directory', 'Directory'),
    }
    _defaults = {
        'extension': lambda *args: '.pdf',
        'sequence': lambda *args: 1,
        'include_name': lambda *args: 1,
    }

    def _file_get(self, cr, node, nodename, content, context=None):
        """ return the nodes of a <node> parent having a <content> content
            The return value MUST be false or a list of node_class objects.
        """
        # TODO: respect the context!
        model = node.res_model
        if content.include_name and not model:
            return False

        res2 = []
        tname = ''
        if content.include_name:
            record_name = node.displayname or ''
            if record_name:
                tname = (content.prefix or '') + record_name + (content.suffix or '') + (content.extension or '')
        else:
            tname = (content.prefix or '') + (content.name or '') + (content.suffix or '') + (content.extension or '')
        # Slashes would break the virtual path; sanitise them.
        # (was: `if tname.find('/'):` which skipped names starting with '/')
        if '/' in tname:
            tname = tname.replace('/', '_')
        act_id = False
        if 'dctx_res_id' in node.dctx:
            act_id = node.dctx['res_id']
        elif hasattr(node, 'res_id'):
            act_id = node.res_id
        else:
            act_id = node.context.context.get('res_id', False)
        if not nodename:
            n = node_content(tname, node, node.context, content, act_id=act_id)
            res2.append(n)
        else:
            if nodename == tname:
                n = node_content(tname, node, node.context, content, act_id=act_id)
                n.fill_fields(cr)
                res2.append(n)
        return res2

    def process_write(self, cr, uid, node, data, context=None):
        """Virtual files are read-only renderings; only .pdf nodes are valid."""
        if node.extension != '.pdf':
            raise Exception("Invalid content: %s" % node.extension)
        return True

    def process_read(self, cr, uid, node, context=None):
        """Render the directory's report for the node's record and return the
        PDF bytes."""
        if node.extension != '.pdf':
            raise Exception("Invalid content: %s" % node.extension)
        report = self.pool.get('ir.actions.report.xml').browse(cr, uid, node.report_id, context=context)
        srv = openerp.report.interface.report_int._reports['report.' + report.report_name]
        ctx = node.context.context.copy()
        ctx.update(node.dctx)
        pdf, pdftype = srv.create(cr, uid, [node.act_id, ], {}, context=ctx)
        return pdf
class ir_action_report_xml(osv.osv):
    """Adds a computed/searchable 'model_id' field (ir.model record matching
    the report's 'model' string) to report actions."""
    _name="ir.actions.report.xml"
    _inherit ="ir.actions.report.xml"
    def _model_get(self, cr, uid, ids, name, arg, context=None):
        """Function-field getter: map each report to the ir.model id whose
        'model' string equals the report's model, or False if none."""
        res = {}
        model_pool = self.pool.get('ir.model')
        for data in self.read(cr, uid, ids, ['model']):
            model = data.get('model',False)
            if model:
                model_id =model_pool.search(cr, uid, [('model','=',model)])
                if model_id:
                    res[data.get('id')] = model_id[0]
                else:
                    res[data.get('id')] = False
            else:
                res[data.get('id')] = False
        return res
    def _model_search(self, cr, uid, obj, name, args, context=None):
        """Search helper for 'model_id'; only a single '=' clause is
        supported."""
        if not len(args):
            return []
        assert len(args) == 1 and args[0][1] == '=', 'expression is not what we expect: %r' % args
        model_id= args[0][2]
        if not model_id:
            # a deviation from standard behavior: when searching model_id = False
            # we return *all* reports, not just ones with empty model.
            # One reason is that 'model' is a required field so far
            return []
        model = self.pool.get('ir.model').read(cr, uid, [model_id])[0]['model']
        report_id = self.search(cr, uid, [('model','=',model)])
        if not report_id:
            # impossible-id clause so the search matches nothing
            return [('id','=','0')]
        return [('id','in',report_id)]
    _columns={
        'model_id' : fields.function(_model_get, fnct_search=_model_search, string='Model Id'),
    }
class document_storage(osv.osv):
    """ The primary object for data storage. Deprecated.

    Kept only for the in-database storage backend: attachment contents
    live base64-encoded in ir.attachment.datas.
    """
    _name = 'document.storage'
    _description = 'Storage Media'

    def get_data(self, cr, uid, id, file_node, context=None, fil_obj=None):
        """ retrieve the contents of some file_node having storage_id = id
        optionally, fil_obj could point to the browse object of the file
        (ir.attachment)

        Returns the decoded (binary) content, or '' when empty.
        """
        boo = self.browse(cr, uid, id, context=context)
        if fil_obj:
            ira = fil_obj
        else:
            ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
        data = ira.datas
        if data:
            out = data.decode('base64')
        else:
            out = ''
        return out

    def get_file(self, cr, uid, id, file_node, mode, context=None):
        """ Return a file-like object for the contents of some node
        """
        if context is None:
            context = {}
        boo = self.browse(cr, uid, id, context=context)
        ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
        # nodefd_db wraps the attachment in a file descriptor-like object
        return nodefd_db(file_node, ira_browse=ira, mode=mode)

    def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None):
        """ store the data.
        This function MUST be used from an ir.attachment. It wouldn't make sense
        to store things persistently for other types (dynamic).

        First writes the base64 payload through the ORM, then updates the
        size/index/mimetype metadata with raw SQL (see note below).
        """
        boo = self.browse(cr, uid, id, context=context)
        if fil_obj:
            ira = fil_obj
        else:
            ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
        _logger.debug( "Store data for ir.attachment #%d." % ira.id)
        store_fname = None
        fname = None
        filesize = len(data)
        self.pool.get('ir.attachment').write(cr, uid, [file_node.file_id], {'datas': data.encode('base64')}, context=context)
        # 2nd phase: store the metadata
        try:
            icont = ''
            mime = ira.file_type
            if not mime:
                mime = ""
            try:
                # best-effort content indexing; failure is logged, not fatal
                mime, icont = cntIndex.doIndex(data, ira.datas_fname, ira.file_type or None, fname)
            except Exception:
                _logger.debug('Cannot index file.', exc_info=True)
                pass
            try:
                icont_u = ustr(icont)
            except UnicodeError:
                icont_u = ''
            # a hack: /assume/ that the calling write operation will not try
            # to write the fname and size, and update them in the db concurrently.
            # We cannot use a write() here, because we are already in one.
            cr.execute('UPDATE ir_attachment SET file_size = %s, index_content = %s, file_type = %s WHERE id = %s', (filesize, icont_u, mime, file_node.file_id))
            file_node.content_length = filesize
            file_node.content_type = mime
            return True
        except Exception, e :
            _logger.warning("Cannot save data.", exc_info=True)
            # should we really rollback once we have written the actual data?
            # at the db case (only), that rollback would be safe
            raise except_orm(_('Error at doc write!'), str(e))
def _str2time(cre):
    """ Convert a string with time representation (from db) into time (float)

    Falsy input yields the current time. A fractional-seconds suffix, if
    present, is split off before parsing and re-added afterwards.
    Note: a place to fix if datetime is used in db.
    """
    if not cre:
        return time.time()
    fraction = 0.0
    if isinstance(cre, basestring) and '.' in cre:
        stamp, dot, tail = cre.partition('.')
        fraction = float(dot + tail)
        cre = stamp
    return time.mktime(time.strptime(cre, '%Y-%m-%d %H:%M:%S')) + fraction
def get_node_context(cr, uid, context):
    """Factory helper: build a node_context root for this cursor/user."""
    return node_context(cr, uid, context)
#
# An object that represents a URI
#  path: the uri of the object
#  content: the Content it belongs to (_print.pdf)
#  type: content or collection
#   content: object = res.partner
#   collection: object = directory, object2 = res.partner
#   file: object = ir.attachment
#  root: whether we are at the first directory of a resource
#
class node_context(object):
    """ This is the root node, representing access to some particular context
    A context is a set of persistent data, which may influence the structure
    of the nodes. All other transient information during a data query should
    be passed down with function arguments.
    """
    cached_roots = {}
    node_file_class = None

    def __init__(self, cr, uid, context=None):
        self.dbname = cr.dbname
        self.uid = uid
        self.context = context
        if context is None:
            context = {}
        # NOTE(review): when called with context=None, self.context stays
        # None while a fresh dict (with 'uid') is passed to
        # _prepare_context below — confirm this is intended.
        context['uid'] = uid
        self._dirobj = openerp.registry(cr.dbname).get('document.directory')
        self.node_file_class = node_file
        self.extra_ctx = {} # Extra keys for context, that do _not_ trigger inequality
        assert self._dirobj
        self._dirobj._prepare_context(cr, uid, self, context=context)
        self.rootdir = False #self._dirobj._get_root_directory(cr,uid,context)

    def __eq__(self, other):
        # two contexts are equal only for the same db, user, context dict
        # and root directory (extra_ctx is deliberately excluded)
        if not type(other) == node_context:
            return False
        if self.dbname != other.dbname:
            return False
        if self.uid != other.uid:
            return False
        if self.context != other.context:
            return False
        if self.rootdir != other.rootdir:
            return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def get(self, name, default=None):
        """Dict-like accessor into the persistent context."""
        return self.context.get(name, default)

    def get_uri(self, cr, uri):
        """ Although this fn passes back to doc.dir, it is needed since
        it is a potential caching point.

        Walks *uri* (a list of path components) down from the root,
        returning the final node or False when a component is missing.
        """
        (ndir, duri) = self._dirobj._locate_child(cr, self.uid, self.rootdir, uri, None, self)
        while duri:
            ndir = ndir.child(cr, duri[0])
            if not ndir:
                return False
            duri = duri[1:]
        return ndir

    def get_dir_node(self, cr, dbro):
        """Create (or locate) a node for a directory
        @param dbro a browse object of document.directory
        """
        fullpath = dbro.get_full_path(context=self.context)
        klass = dbro.get_node_class(dbro, context=self.context)
        return klass(fullpath, None ,self, dbro)

    def get_file_node(self, cr, fbro):
        """ Create or locate a node for a static file
        @param fbro a browse object of an ir.attachment
        """
        parent = None
        if fbro.parent_id:
            parent = self.get_dir_node(cr, fbro.parent_id)
        return self.node_file_class(fbro.name, parent, self, fbro)
class node_class(object):
    """ this is a superclass for our inodes
    It is an API for all code that wants to access the document files.
    Nodes have attributes which contain usual file properties

    Subclasses (node_dir, node_res_dir, node_res_obj, node_file, ...)
    override the stubs below; this base only carries common metadata
    and the permission/ETag helpers.
    """
    our_type = 'baseclass'
    DAV_PROPS = None
    DAV_M_NS = None

    def __init__(self, path, parent, context):
        assert isinstance(context,node_context)
        assert (not parent ) or isinstance(parent,node_class)
        self.path = path
        self.context = context
        self.type=self.our_type
        self.parent = parent
        self.uidperms = 5  # computed permissions for our uid, in unix bits
        self.mimetype = 'application/octet-stream'
        self.create_date = None
        self.write_date = None
        self.unixperms = 0660
        self.uuser = 'user'
        self.ugroup = 'group'
        self.content_length = 0
        # dynamic context: inherited (copied) from the parent node
        self.dctx = {}
        if parent:
            self.dctx = parent.dctx.copy()
        self.displayname = 'Object'

    def __eq__(self, other):
        # subclasses must define meaningful equality
        return NotImplemented

    def __ne__(self, other):
        return not self.__eq__(other)

    def full_path(self):
        """ Return the components of the full path for some
        node.
        The returned list only contains the names of nodes.
        """
        if self.parent:
            s = self.parent.full_path()
        else:
            s = []
        if isinstance(self.path,list):
            s+=self.path
        elif self.path is None:
            s.append('')
        else:
            s.append(self.path)
        return s #map(lambda x: '/' +x, s)

    def __repr__(self):
        return "%s@/%s" % (self.our_type, '/'.join(self.full_path()))

    def children(self, cr, domain=None):
        """List child nodes. Stub: subclasses override."""
        print "node_class.children()"
        return [] #stub

    def child(self, cr, name, domain=None):
        """Locate a single child by name. Stub: subclasses override."""
        print "node_class.child()"
        return None

    def get_uri(self, cr, uri):
        """Walk *uri* components down from this node; False on miss."""
        duri = uri
        ndir = self
        while duri:
            ndir = ndir.child(cr, duri[0])
            if not ndir:
                return False
            duri = duri[1:]
        return ndir

    def path_get(self):
        # stub, see subclasses
        print "node_class.path_get()"
        return False

    def get_data(self, cr):
        raise TypeError('No data for %s.'% self.type)

    def open_data(self, cr, mode):
        """ Open a node_descriptor object for this node.

        @param the mode of open, eg 'r', 'w', 'a', like file.open()

        This operation may lock the data for this node (and accross
        other node hierarchies), until the descriptor is close()d. If
        the node is locked, subsequent opens (depending on mode) may
        immediately fail with an exception (which?).

        For this class, there is no data, so no implementation. Each
        child class that has data should override this.
        """
        raise TypeError('No data for %s.' % self.type)

    def get_etag(self, cr):
        """ Get a tag, unique per object + modification.
            see. http://tools.ietf.org/html/rfc2616#section-13.3.3 """
        return '"%s-%s"' % (self._get_ttag(cr), self._get_wtag(cr))

    def _get_wtag(self, cr):
        """ Return the modification time as a unique, compact string """
        return str(_str2time(self.write_date)).replace('.','')

    def _get_ttag(self, cr):
        """ Get a unique tag for this type/id of object.
            Must be overriden, so that each node is uniquely identified.
        """
        print "node_class.get_ttag()",self
        raise NotImplementedError("get_ttag stub()")

    def get_dav_props(self, cr):
        """ If this class has special behaviour for GroupDAV etc, export
        its capabilities """
        # This fn is placed here rather than WebDAV, because we want the
        # baseclass methods to apply to all node subclasses
        return self.DAV_PROPS or {}

    def match_dav_eprop(self, cr, match, ns, prop):
        """Return True when the DAV extended property equals *match*."""
        res = self.get_dav_eprop(cr, ns, prop)
        if res == match:
            return True
        return False

    def get_dav_eprop(self, cr, ns, prop):
        """Resolve a namespaced DAV property via a dynamically-named
        method '<prefix>_<prop>' on self; None when unsupported."""
        if not self.DAV_M_NS:
            return None
        if self.DAV_M_NS.has_key(ns):
            prefix = self.DAV_M_NS[ns]
        else:
            _logger.debug('No namespace: %s ("%s").',ns, prop)
            return None
        mname = prefix + "_" + prop.replace('-','_')
        if not hasattr(self, mname):
            return None
        try:
            m = getattr(self, mname)
            r = m(cr)
            return r
        except AttributeError:
            _logger.debug('The property %s is not supported.' % prop, exc_info=True)
        return None

    def get_dav_resourcetype(self, cr):
        """ Get the DAV resource type.
            Is here because some nodes may exhibit special behaviour, like
            CalDAV/GroupDAV collections
        """
        raise NotImplementedError

    def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
        """ Move this node to a new parent directory.
        @param ndir_node the collection that this node should be moved under
        @param new_name a name to rename this node to. If omitted, the old
            name is preserved
        @param fil_obj, can be None, is the browse object for the file,
            if already available.
        @param ndir_obj must be the browse object to the new doc.directory
            location, where this node should be moved to.
        in_write: When called by write(), we shouldn't attempt to write the
            object, but instead return the dict of vals (avoid re-entrance).
            If false, we should write all data to the object, here, as if the
            caller won't do anything after calling move_to()

        Return value:
            True: the node is moved, the caller can update other values, too.
            False: the node is either removed or fully updated, the caller
                must discard the fil_obj, not attempt to write any more to it.
            dict: values to write back to the object. *May* contain a new id!

        Depending on src and target storage, implementations of this function
        could do various things.
        Should also consider node<->content, dir<->dir moves etc.

        Move operations, as instructed from APIs (e.g. request from DAV) could
        use this function.
        """
        raise NotImplementedError(repr(self))

    def create_child(self, cr, path, data=None):
        """ Create a regular file under this node
        """
        _logger.warning("Attempted to create a file under %r, not possible.", self)
        raise IOError(errno.EPERM, "Not allowed to create file(s) here.")

    def create_child_collection(self, cr, objname):
        """ Create a child collection (directory) under self
        """
        _logger.warning("Attempted to create a collection under %r, not possible.", self)
        raise IOError(errno.EPERM, "Not allowed to create folder(s) here.")

    def rm(self, cr):
        raise NotImplementedError(repr(self))

    def rmcol(self, cr):
        raise NotImplementedError(repr(self))

    def get_domain(self, cr, filters):
        # TODO Document
        return []

    def check_perms(self, perms):
        """ Check the permissions of the current node.

        @param perms either an integers of the bits to check, or
                a string with the permission letters

        Permissions of nodes are (in a unix way):
        1, x : allow descend into dir
        2, w : allow write into file, or modification to dir
        4, r : allow read of file, or listing of dir contents
        8, u : allow remove (unlink)
        """
        if isinstance(perms, str):
            pe2 = 0
            chars = { 'x': 1, 'w': 2, 'r': 4, 'u': 8 }
            for c in perms:
                pe2 = pe2 | chars[c]
            perms = pe2
        elif isinstance(perms, int):
            if perms < 0 or perms > 15:
                raise ValueError("Invalid permission bits.")
        else:
            raise ValueError("Invalid permission attribute.")
        # all requested bits must be present in our computed bits
        return ((self.uidperms & perms) == perms)
class node_database(node_class):
    """ A node representing the database directory

    The virtual root of the node tree: its children are the top-level
    document.directory records (those without a parent and not bound to
    a resource type).
    """
    our_type = 'database'

    def __init__(self, path=None, parent=False, context=None):
        if path is None:
            path = []
        super(node_database,self).__init__(path, parent, context)
        self.unixperms = 040750
        self.uidperms = 5

    def children(self, cr, domain=None):
        """All child directories plus any loose files at the root."""
        res = self._child_get(cr, domain=domain) + self._file_get(cr)
        return res

    def child(self, cr, name, domain=None):
        """Locate one child by name: directories first, then files."""
        res = self._child_get(cr, name, domain=None)
        if res:
            return res[0]
        res = self._file_get(cr,name)
        if res:
            return res[0]
        return None

    def _child_get(self, cr, name=False, domain=None):
        """Browse top-level document.directory records and wrap each in
        its node class. Raises EPERM when the uid lacks permission."""
        dirobj = self.context._dirobj
        uid = self.context.uid
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        where = [('parent_id','=', False), ('ressource_parent_type_id','=',False)]
        if name:
            where.append(('name','=',name))
            # named lookup only needs 'descend' (x)
            is_allowed = self.check_perms(1)
        else:
            # full listing needs read+descend (r+x)
            is_allowed = self.check_perms(5)
        if not is_allowed:
            raise IOError(errno.EPERM, "Permission into directory denied.")
        if domain:
            where = where + domain
        ids = dirobj.search(cr, uid, where, context=ctx)
        res = []
        for dirr in dirobj.browse(cr, uid, ids, context=ctx):
            klass = dirr.get_node_class(dirr, context=ctx)
            res.append(klass(dirr.name, self, self.context,dirr))
        return res

    def _file_get(self, cr, nodename=False):
        # the db root has no files of its own
        res = []
        return res

    def _get_ttag(self, cr):
        return 'db-%s' % cr.dbname
def mkdosname(company_name, default='noname'):
    """ convert a string to a dos-like name

    Truncates to 8 characters and replaces filesystem-unsafe characters
    with underscores; falsy input yields *default*.
    """
    if not company_name:
        return default
    badchars = ' !@#$%^`~*()+={}[];:\'"/?.<>'
    return ''.join(('_' if c in badchars else c) for c in company_name[:8])
def _uid2unixperms(perms, has_owner):
""" Convert the uidperms and the owner flag to full unix bits
"""
res = 0
if has_owner:
res |= (perms & 0x07) << 6
res |= (perms & 0x05) << 3
elif perms & 0x02:
res |= (perms & 0x07) << 6
res |= (perms & 0x07) << 3
else:
res |= (perms & 0x07) << 6
res |= (perms & 0x05) << 3
res |= 0x05
return res
class node_dir(node_database):
    """A static directory node, backed by one document.directory record."""
    our_type = 'collection'

    def __init__(self, path, parent, context, dirr, dctx=None):
        super(node_dir,self).__init__(path, parent,context)
        self.dir_id = dirr and dirr.id or False
        #todo: more info from dirr
        self.mimetype = 'application/x-directory'
            # 'httpd/unix-directory'
        self.create_date = dirr and dirr.create_date or False
        self.domain = dirr and dirr.domain or []
        self.res_model = dirr and dirr.ressource_type_id and dirr.ressource_type_id.model or False
        # TODO: the write date should be MAX(file.write)..
        self.write_date = dirr and (dirr.write_date or dirr.create_date) or False
        self.content_length = 0
        try:
            self.uuser = (dirr.user_id and dirr.user_id.login) or 'nobody'
        except Exception:
            self.uuser = 'nobody'
        self.ugroup = mkdosname(dirr.company_id and dirr.company_id.name, default='nogroup')
        self.uidperms = dirr.get_dir_permissions()
        self.unixperms = 040000 | _uid2unixperms(self.uidperms, dirr and dirr.user_id)
        if dctx:
            self.dctx.update(dctx)
        dc2 = self.context.context
        dc2.update(self.dctx)
        dc2['dir_id'] = self.dir_id
        self.displayname = dirr and dirr.name or False
        if dirr and dirr.dctx_ids:
            # evaluate each dynamic-context expression; failures are
            # reported but do not abort node construction
            for dfld in dirr.dctx_ids:
                try:
                    self.dctx[dfld.field] = safe_eval(dfld.expr,dc2)
                except Exception,e:
                    print "Cannot eval %s." % dfld.expr
                    print e
                    pass

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        if not self.context == other.context:
            return False
        # Two directory nodes, for the same document.directory, may have a
        # different context! (dynamic folders)
        if self.dctx != other.dctx:
            return False
        return self.dir_id == other.dir_id

    def get_data(self, cr):
        # directories have no aggregate data
        #res = ''
        #for child in self.children(cr):
        #    res += child.get_data(cr)
        return None

    def _file_get(self, cr, nodename=False):
        """Return virtual content nodes (document.directory.content) that
        belong to this directory, in addition to the superclass' files."""
        res = super(node_dir,self)._file_get(cr, nodename)
        is_allowed = self.check_perms(nodename and 1 or 5)
        if not is_allowed:
            raise IOError(errno.EPERM, "Permission into directory denied.")
        cntobj = self.context._dirobj.pool.get('document.directory.content')
        uid = self.context.uid
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        where = [('directory_id','=',self.dir_id) ]
        ids = cntobj.search(cr, uid, where, context=ctx)
        for content in cntobj.browse(cr, uid, ids, context=ctx):
            res3 = cntobj._file_get(cr, self, nodename, content)
            if res3:
                res.extend(res3)
        return res

    def _child_get(self, cr, name=None, domain=None):
        """Child directories plus model-less attachments parented here."""
        dirobj = self.context._dirobj
        uid = self.context.uid
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        where = [('parent_id','=',self.dir_id)]
        if name:
            where.append(('name','=',name))
            is_allowed = self.check_perms(1)
        else:
            is_allowed = self.check_perms(5)
        if not is_allowed:
            raise IOError(errno.EPERM, "Permission into directory denied.")
        if not domain:
            domain = []
        where2 = where + domain + [('ressource_parent_type_id','=',False)]
        ids = dirobj.search(cr, uid, where2, context=ctx)
        res = []
        for dirr in dirobj.browse(cr, uid, ids, context=ctx):
            klass = dirr.get_node_class(dirr, context=ctx)
            res.append(klass(dirr.name, self, self.context,dirr))
        # Static directories should never return files with res_model/res_id
        # because static dirs are /never/ related to a record.
        # In fact, files related to some model and parented by the root dir
        # (the default), will NOT be accessible in the node system unless
        # a resource folder for that model exists (with resource_find_all=True).
        # Having resource attachments in a common folder is bad practice,
        # because they would be visible to all users, and their names may be
        # the same, conflicting.
        where += [('res_model', '=', False)]
        fil_obj = dirobj.pool.get('ir.attachment')
        ids = fil_obj.search(cr, uid, where, context=ctx)
        if ids:
            for fil in fil_obj.browse(cr, uid, ids, context=ctx):
                klass = self.context.node_file_class
                res.append(klass(fil.name, self, self.context, fil))
        return res

    def rmcol(self, cr):
        """Remove this (empty) directory; requires the 'u' permission."""
        uid = self.context.uid
        directory = self.context._dirobj.browse(cr, uid, self.dir_id)
        res = False
        if not directory:
            raise OSError(2, 'Not such file or directory.')
        if not self.check_perms('u'):
            raise IOError(errno.EPERM,"Permission denied.")
        if directory._table_name=='document.directory':
            if self.children(cr):
                raise OSError(39, 'Directory not empty.')
            res = self.context._dirobj.unlink(cr, uid, [directory.id])
        else:
            raise OSError(1, 'Operation is not permitted.')
        return res

    def create_child_collection(self, cr, objname):
        """Create a sub-directory named *objname* under this one."""
        object2 = False
        if not self.check_perms(2):
            raise IOError(errno.EPERM,"Permission denied.")
        dirobj = self.context._dirobj
        uid = self.context.uid
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        obj = dirobj.browse(cr, uid, self.dir_id)
        if obj and (obj.type == 'ressource') and not object2:
            raise OSError(1, 'Operation is not permitted.')
        #objname = uri2[-1]
        val = {
            'name': objname,
            'ressource_parent_type_id': obj and obj.ressource_type_id.id or False,
            'ressource_id': object2 and object2.id or False,
            'parent_id' : obj and obj.id or False
        }
        return dirobj.create(cr, uid, val)

    def create_child(self, cr, path, data=None):
        """ API function to create a child file object and node
            Return the node_* created
        """
        if not self.check_perms(2):
            raise IOError(errno.EPERM,"Permission denied.")
        dirobj = self.context._dirobj
        uid = self.context.uid
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        fil_obj=dirobj.pool.get('ir.attachment')
        val = {
            'name': path,
            'datas_fname': path,
            'parent_id': self.dir_id,
            # Datas are not set here
        }
        fil_id = fil_obj.create(cr, uid, val, context=ctx)
        fil = fil_obj.browse(cr, uid, fil_id, context=ctx)
        fnode = node_file(path, self, self.context, fil)
        if data is not None:
            fnode.set_data(cr, data, fil)
        return fnode

    def _get_ttag(self, cr):
        return 'dir-%d' % self.dir_id

    def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
        """ Move directory. This operation is simple, since the present node is
        only used for static, simple directories.
        Note /may/ be called with ndir_node = None, to rename the document root.

        Only renames within the same parent are implemented; an actual
        re-parenting raises NotImplementedError.
        """
        if ndir_node and (ndir_node.context != self.context):
            raise NotImplementedError("Cannot move directories between contexts.")
        if (not self.check_perms('u')) or (not ndir_node.check_perms('w')):
            raise IOError(errno.EPERM,"Permission denied.")
        dir_obj = self.context._dirobj
        if not fil_obj:
            dbro = dir_obj.browse(cr, self.context.uid, self.dir_id, context=self.context.context)
        else:
            dbro = dir_obj
            assert dbro.id == self.dir_id
        if not dbro:
            raise IndexError("Cannot locate dir %d", self.dir_id)
        if (not self.parent) and ndir_node:
            # lazily resolve our parent node to compare against the target
            if not dbro.parent_id:
                raise IOError(errno.EPERM, "Cannot move the root directory!")
            self.parent = self.context.get_dir_node(cr, dbro.parent_id)
            assert self.parent
        if self.parent != ndir_node:
            _logger.debug('Cannot move dir %r from %r to %r.', self, self.parent, ndir_node)
            raise NotImplementedError('Cannot move dir to another dir.')
        ret = {}
        if new_name and (new_name != dbro.name):
            if ndir_node.child(cr, new_name):
                raise IOError(errno.EEXIST, "Destination path already exists.")
            ret['name'] = new_name
        del dbro
        if not in_write:
            # We have to update the data ourselves
            if ret:
                ctx = self.context.context.copy()
                ctx['__from_node'] = True
                dir_obj.write(cr, self.context.uid, [self.dir_id,], ret, ctx)
            ret = True
        return ret
class node_res_dir(node_class):
    """ A folder containing dynamic folders
        A special sibling to node_dir, which does only contain dynamically
        created folders foreach resource in the foreign model.
        All folders should be of type node_res_obj and merely behave like
        node_dirs (with limited domain).
    """
    our_type = 'collection'
    res_obj_class = None

    def __init__(self, path, parent, context, dirr, dctx=None ):
        super(node_res_dir,self).__init__(path, parent, context)
        self.dir_id = dirr.id
        #todo: more info from dirr
        self.mimetype = 'application/x-directory'
                # 'httpd/unix-directory'
        self.create_date = dirr.create_date
        # TODO: the write date should be MAX(file.write)..
        self.write_date = dirr.write_date or dirr.create_date
        self.content_length = 0
        try:
            self.uuser = (dirr.user_id and dirr.user_id.login) or 'nobody'
        except Exception:
            self.uuser = 'nobody'
        self.ugroup = mkdosname(dirr.company_id and dirr.company_id.name, default='nogroup')
        self.uidperms = dirr.get_dir_permissions()
        self.unixperms = 040000 | _uid2unixperms(self.uidperms, dirr and dirr.user_id)
        self.res_model = dirr.ressource_type_id and dirr.ressource_type_id.model or False
        self.resm_id = dirr.ressource_id
        self.res_find_all = dirr.resource_find_all
        self.namefield = dirr.resource_field.name or 'name'
        self.displayname = dirr.name
        # Important: the domain is evaluated using the *parent* dctx!
        self.domain = dirr.domain
        self.ressource_tree = dirr.ressource_tree
        # and then, we add our own vars in the dctx:
        if dctx:
            self.dctx.update(dctx)
        # and then, we prepare a dctx dict, for deferred evaluation:
        self.dctx_dict = {}
        for dfld in dirr.dctx_ids:
            self.dctx_dict[dfld.field] = dfld.expr

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        if not self.context == other.context:
            return False
        # Two nodes, for the same document.directory, may have a
        # different context! (dynamic folders)
        if self.dctx != other.dctx:
            return False
        return self.dir_id == other.dir_id

    def children(self, cr, domain=None):
        return self._child_get(cr, domain=domain)

    def child(self, cr, name, domain=None):
        res = self._child_get(cr, name, domain=domain)
        if res:
            return res[0]
        return None

    def _child_get(self, cr, name=None, domain=None):
        """ return virtual children of resource, based on the
        foreign object.

        Note that many objects use NULL for a name, so we should
        better call the name_search(),name_get() set of methods
        """
        if self.res_model not in self.context._dirobj.pool:
            return []
        obj = self.context._dirobj.pool[self.res_model]
        dirobj = self.context._dirobj
        uid = self.context.uid
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        ctx.update(self.context.extra_ctx)
        where = []
        if self.domain:
            # the directory's stored domain may eval to a list of leaves
            # or a single tuple leaf
            app = safe_eval(self.domain, ctx)
            if not app:
                pass
            elif isinstance(app, list):
                where.extend(app)
            elif isinstance(app, tuple):
                where.append(app)
            else:
                raise RuntimeError("Incorrect domain expr: %s." % self.domain)
        if self.resm_id:
            where.append(('id','=',self.resm_id))
        if name:
            # The =like character will match underscores against any characters
            # including the special ones that couldn't exist in a FTP/DAV request
            where.append((self.namefield,'=like',name.replace('\\','\\\\')))
            is_allowed = self.check_perms(1)
        else:
            is_allowed = self.check_perms(5)
        if not is_allowed:
            raise IOError(errno.EPERM,"Permission denied.")
        # print "Where clause for %s" % self.res_model, where
        if self.ressource_tree:
            object2 = False
            if self.resm_id:
                object2 = dirobj.pool[self.res_model].browse(cr, uid, self.resm_id) or False
            if obj._parent_name in obj.fields_get(cr, uid):
                where.append((obj._parent_name,'=',object2 and object2.id or False))
        resids = obj.search(cr, uid, where, context=ctx)
        res = []
        for bo in obj.browse(cr, uid, resids, context=ctx):
            if not bo:
                continue
            res_name = getattr(bo, self.namefield)
            if not res_name:
                continue
                # Yes! we can't do better but skip nameless records.
            # Escape the name for characters not supported in filenames
            res_name = res_name.replace('/','_') # any other weird char?
            if name and (res_name != ustr(name)):
                # we have matched _ to any character, but we only meant to match
                # the special ones.
                # Eg. 'a_c' will find 'abc', 'a/c', 'a_c', may only
                # return 'a/c' and 'a_c'
                continue
            res.append(self.res_obj_class(res_name, self.dir_id, self, self.context, self.res_model, bo))
        return res

    def _get_ttag(self, cr):
        return 'rdir-%d' % self.dir_id
class node_res_obj(node_class):
""" A dynamically created folder.
A special sibling to node_dir, which does only contain dynamically
created folders foreach resource in the foreign model.
All folders should be of type node_res_obj and merely behave like
node_dirs (with limited domain).
"""
our_type = 'collection'
def __init__(self, path, dir_id, parent, context, res_model, res_bo, res_id=None):
super(node_res_obj,self).__init__(path, parent,context)
assert parent
#todo: more info from dirr
self.dir_id = dir_id
self.mimetype = 'application/x-directory'
# 'httpd/unix-directory'
self.create_date = parent.create_date
# TODO: the write date should be MAX(file.write)..
self.write_date = parent.write_date
self.content_length = 0
self.uidperms = parent.uidperms & 15
self.unixperms = 040000 | _uid2unixperms(self.uidperms, True)
self.uuser = parent.uuser
self.ugroup = parent.ugroup
self.res_model = res_model
self.domain = parent.domain
self.displayname = path
self.dctx_dict = parent.dctx_dict
if isinstance(parent, node_res_dir):
self.res_find_all = parent.res_find_all
else:
self.res_find_all = False
if res_bo:
self.res_id = res_bo.id
dc2 = self.context.context.copy()
dc2.update(self.dctx)
dc2['res_model'] = res_model
dc2['res_id'] = res_bo.id
dc2['this'] = res_bo
for fld,expr in self.dctx_dict.items():
try:
self.dctx[fld] = safe_eval(expr, dc2)
except Exception,e:
print "Cannot eval %s for %s." % (expr, fld)
print e
pass
else:
self.res_id = res_id
def __eq__(self, other):
if type(self) != type(other):
return False
if not self.context == other.context:
return False
if not self.res_model == other.res_model:
return False
if not self.res_id == other.res_id:
return False
if self.domain != other.domain:
return False
if self.res_find_all != other.res_find_all:
return False
if self.dctx != other.dctx:
return False
return self.dir_id == other.dir_id
def children(self, cr, domain=None):
return self._child_get(cr, domain=domain) + self._file_get(cr)
def child(self, cr, name, domain=None):
res = self._child_get(cr, name, domain=domain)
if res:
return res[0]
res = self._file_get(cr, name)
if res:
return res[0]
return None
def _file_get(self, cr, nodename=False):
res = []
is_allowed = self.check_perms((nodename and 1) or 5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied.")
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
ctx = self.context.context.copy()
ctx.update(self.dctx)
where = [('directory_id','=',self.dir_id) ]
#if self.domain:
# where.extend(self.domain)
# print "res_obj file_get clause", where
ids = cntobj.search(cr, uid, where, context=ctx)
for content in cntobj.browse(cr, uid, ids, context=ctx):
res3 = cntobj._file_get(cr, self, nodename, content, context=ctx)
if res3:
res.extend(res3)
return res
def get_dav_props_DEPR(self, cr):
# Deprecated! (but document_ics must be cleaned, first)
res = {}
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
ctx = self.context.context.copy()
ctx.update(self.dctx)
where = [('directory_id','=',self.dir_id) ]
ids = cntobj.search(cr, uid, where, context=ctx)
for content in cntobj.browse(cr, uid, ids, context=ctx):
if content.extension == '.ics': # FIXME: call the content class!
res['http://groupdav.org/'] = ('resourcetype',)
return res
def get_dav_eprop_DEPR(self, cr, ns, prop):
# Deprecated!
if ns != 'http://groupdav.org/' or prop != 'resourcetype':
_logger.warning("Who asks for %s:%s?" % (ns, prop))
return None
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
ctx = self.context.context.copy()
ctx.update(self.dctx)
where = [('directory_id','=',self.dir_id) ]
ids = cntobj.search(cr,uid,where,context=ctx)
for content in cntobj.browse(cr, uid, ids, context=ctx):
# TODO: remove relic of GroupDAV
if content.extension == '.ics': # FIXME: call the content class!
return ('vevent-collection','http://groupdav.org/')
return None
def _child_get(self, cr, name=None, domain=None):
dirobj = self.context._dirobj
is_allowed = self.check_perms((name and 1) or 5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied.")
uid = self.context.uid
ctx = self.context.context.copy()
ctx.update(self.dctx)
directory = dirobj.browse(cr, uid, self.dir_id)
obj = dirobj.pool[self.res_model]
where = []
res = []
if name:
where.append(('name','=',name))
# Directory Structure display in tree structure
if self.res_id and directory.ressource_tree:
where1 = []
if name:
where1.append(('name','=like',name.replace('\\','\\\\')))
if obj._parent_name in obj.fields_get(cr, uid):
where1.append((obj._parent_name, '=', self.res_id))
namefield = directory.resource_field.name or 'name'
resids = obj.search(cr, uid, where1, context=ctx)
for bo in obj.browse(cr, uid, resids, context=ctx):
if not bo:
continue
res_name = getattr(bo, namefield)
if not res_name:
continue
res_name = res_name.replace('/', '_')
if name and (res_name != ustr(name)):
continue
# TODO Revise
klass = directory.get_node_class(directory, dynamic=True, context=ctx)
rnode = klass(res_name, dir_id=self.dir_id, parent=self, context=self.context,
res_model=self.res_model, res_bo=bo)
rnode.res_find_all = self.res_find_all
res.append(rnode)
where2 = where + [('parent_id','=',self.dir_id) ]
ids = dirobj.search(cr, uid, where2, context=ctx)
bo = obj.browse(cr, uid, self.res_id, context=ctx)
for dirr in dirobj.browse(cr, uid, ids, context=ctx):
if name and (name != dirr.name):
continue
if dirr.type == 'directory':
klass = dirr.get_node_class(dirr, dynamic=True, context=ctx)
res.append(klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = bo, res_id = self.res_id))
elif dirr.type == 'ressource':
# child resources can be controlled by properly set dctx
klass = dirr.get_node_class(dirr, context=ctx)
res.append(klass(dirr.name,self,self.context, dirr, {'active_id': self.res_id})) # bo?
fil_obj = dirobj.pool.get('ir.attachment')
if self.res_find_all:
where2 = where
where3 = where2 + [('res_model', '=', self.res_model), ('res_id','=',self.res_id)]
# print "where clause for dir_obj", where3
ids = fil_obj.search(cr, uid, where3, context=ctx)
if ids:
for fil in fil_obj.browse(cr, uid, ids, context=ctx):
klass = self.context.node_file_class
res.append(klass(fil.name, self, self.context, fil))
# Get Child Ressource Directories
if directory.ressource_type_id and directory.ressource_type_id.id:
where4 = where + [('ressource_parent_type_id','=',directory.ressource_type_id.id)]
where5 = where4 + ['|', ('ressource_id','=',0), ('ressource_id','=',self.res_id)]
dirids = dirobj.search(cr,uid, where5)
for dirr in dirobj.browse(cr, uid, dirids, context=ctx):
if dirr.type == 'directory' and not dirr.parent_id:
klass = dirr.get_node_class(dirr, dynamic=True, context=ctx)
rnode = klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = bo, res_id = self.res_id)
rnode.res_find_all = dirr.resource_find_all
res.append(rnode)
if dirr.type == 'ressource':
klass = dirr.get_node_class(dirr, context=ctx)
rnode = klass(dirr.name, self, self.context, dirr, {'active_id': self.res_id})
rnode.res_find_all = dirr.resource_find_all
res.append(rnode)
return res
def create_child_collection(self, cr, objname):
    """Create a child ``document.directory`` named *objname* under this node.

    Requires write (2) permission on this node. The new directory inherits
    this node's resource type and, when the underlying record exists, points
    at it via ``ressource_id``. Returns the id of the created directory row.

    :param cr: database cursor
    :param objname: name for the new directory
    :raises IOError: EPERM when write permission is missing
    :raises OSError: when this is a resource-typed dir with no backing record
    """
    dirobj = self.context._dirobj
    is_allowed = self.check_perms(2)
    if not is_allowed:
        raise IOError(errno.EPERM, "Permission denied.")
    uid = self.context.uid
    # NOTE(review): ctx is built but never passed to create() below — looks
    # unused; confirm before passing it through (may change translations).
    ctx = self.context.context.copy()
    ctx.update(self.dctx)
    res_obj = dirobj.pool[self.res_model]
    object2 = res_obj.browse(cr, uid, self.res_id) or False
    obj = dirobj.browse(cr, uid, self.dir_id)
    if obj and (obj.type == 'ressource') and not object2:
        raise OSError(1, 'Operation is not permitted.')

    val = {
        'name': objname,
        'ressource_parent_type_id': obj and obj.ressource_type_id.id or False,
        'ressource_id': object2 and object2.id or False,
        'parent_id': False,
        'resource_find_all': False,
    }
    # Fixed: the original tested `obj.type in ('directory')`, which — due to
    # the missing tuple comma — was a *substring* test against the string
    # 'directory' (so e.g. 'direct' would match). Exact equality is intended.
    if (obj and (obj.type == 'directory')) or not object2:
        val['parent_id'] = obj and obj.id or False
    return dirobj.create(cr, uid, val)
def create_child(self, cr, path, data=None):
    """ API function to create a child file object and node
    Return the node_* created
    """
    # Creating a child requires write (2) permission on this node.
    is_allowed = self.check_perms(2)
    if not is_allowed:
        raise IOError(errno.EPERM, "Permission denied.")
    dirobj = self.context._dirobj
    uid = self.context.uid
    ctx = self.context.context.copy()
    ctx.update(self.dctx)
    fil_obj = dirobj.pool.get('ir.attachment')
    # New attachment is linked to the same model/record this node represents.
    val = {
        'name': path,
        'datas_fname': path,
        'res_model': self.res_model,
        'res_id': self.res_id,
        # Datas are not set here
    }
    # When not in "find all" mode the attachment is parented to this directory.
    if not self.res_find_all:
        val['parent_id'] = self.dir_id
    fil_id = fil_obj.create(cr, uid, val, context=ctx)
    fil = fil_obj.browse(cr, uid, fil_id, context=ctx)
    # Wrap the fresh attachment in the configured file-node class.
    klass = self.context.node_file_class
    fnode = klass(path, self, self.context, fil)
    if data is not None:
        fnode.set_data(cr, data, fil)
    return fnode
def _get_ttag(self, cr):
return 'rodir-%d-%d' % (self.dir_id, self.res_id)
node_res_dir.res_obj_class = node_res_obj
class node_file(node_class):
    """Filesystem-like node wrapping a single ``ir.attachment`` record."""
    our_type = 'file'

    def __init__(self, path, parent, context, fil):
        """Build the node from an ``ir.attachment`` browse record *fil*."""
        super(node_file, self).__init__(path, parent, context)
        self.file_id = fil.id
        #todo: more info from ir_attachment
        # Only accept a mimetype that looks like "type/subtype".
        if fil.file_type and '/' in fil.file_type:
            self.mimetype = str(fil.file_type)
        self.create_date = fil.create_date
        self.write_date = fil.write_date or fil.create_date
        self.content_length = fil.file_size
        self.displayname = fil.name

        # Default rwx-style permission mask; restricted by the parent's perms.
        self.uidperms = 14
        if parent:
            if not parent.check_perms('x'):
                self.uidperms = 0
            elif not parent.check_perms('w'):
                self.uidperms = 4

        try:
            self.uuser = (fil.user_id and fil.user_id.login) or 'nobody'
        except Exception:
            self.uuser = 'nobody'
        self.ugroup = mkdosname(fil.company_id and fil.company_id.name, default='nogroup')

    def __eq__(self, other):
        # Nodes are equal when they share type, context, dynamic context and
        # wrap the same attachment id.
        if type(self) != type(other):
            return False
        if not self.context == other.context:
            return False
        if self.dctx != other.dctx:
            return False
        return self.file_id == other.file_id

    def open_data(self, cr, mode):
        """Open this file's data via document.storage; needs read (4) perm."""
        if not self.check_perms(4):
            raise IOError(errno.EPERM, "Permission denied.")
        stobj = self.context._dirobj.pool.get('document.storage')
        return stobj.get_file(cr, self.context.uid, None, self, mode=mode, context=self.context.context)

    def rm(self, cr):
        """Unlink the underlying attachment; needs unlink (8) permission."""
        uid = self.context.uid
        if not self.check_perms(8):
            raise IOError(errno.EPERM, "Permission denied.")
        document_obj = self.context._dirobj.pool.get('ir.attachment')
        if self.type in ('collection', 'database'):
            return False
        document = document_obj.browse(cr, uid, self.file_id, context=self.context.context)
        res = False
        if document and document._table_name == 'ir.attachment':
            res = document_obj.unlink(cr, uid, [document.id])
        return res

    def fix_ppath(self, cr, fbro):
        """Sometimes we may init this w/o path, parent.
        This function fills the missing path from the file browse object
        Note: this may be an expensive operation, do on demand. However,
        once caching is in, we might want to do that at init time and keep
        this object anyway
        """
        if self.path or self.parent:
            return
        assert fbro
        uid = self.context.uid
        dirpath = []
        if fbro.parent_id:
            dirobj = self.context._dirobj.pool.get('document.directory')
            dirpath = dirobj.get_full_path(cr, uid, fbro.parent_id.id, context=self.context.context)
        if fbro.datas_fname:
            dirpath.append(fbro.datas_fname)
        else:
            dirpath.append(fbro.name)
        # A single component collapses to a plain string path.
        if len(dirpath) > 1:
            self.path = dirpath
        else:
            self.path = dirpath[0]

    def get_data(self, cr, fil_obj=None):
        """ Retrieve the data for some file.
        fil_obj may optionally be specified, and should be a browse object
        for the file. This is useful when the caller has already initiated
        the browse object. """
        if not self.check_perms(4):
            raise IOError(errno.EPERM, "Permission denied.")
        stobj = self.context._dirobj.pool.get('document.storage')
        return stobj.get_data(cr, self.context.uid, None, self, self.context.context, fil_obj)

    def get_data_len(self, cr, fil_obj=None):
        """Return the content length, lazily read from fil_obj in bin_size mode."""
        bin_size = self.context.context.get('bin_size', False)
        if bin_size and not self.content_length:
            self.content_length = fil_obj.db_datas
        return self.content_length

    def set_data(self, cr, data, fil_obj=None):
        """ Store data at some file.
        fil_obj may optionally be specified, and should be a browse object
        for the file. This is useful when the caller has already initiated
        the browse object. """
        if not self.check_perms(2):
            raise IOError(errno.EPERM, "Permission denied.")
        stobj = self.context._dirobj.pool.get('document.storage')
        return stobj.set_data(cr, self.context.uid, None, self, data, self.context.context, fil_obj)

    def _get_ttag(self, cr):
        # Cache tag unique per attachment id.
        return 'file-%d' % self.file_id

    def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
        """Move and/or rename this file; rename+move in one call is refused."""
        if ndir_node and ndir_node.context != self.context:
            raise NotImplementedError("Cannot move files between contexts.")

        # NOTE(review): this denies only when self lacks unlink perm AND the
        # target allows write; intent was probably
        # `not (self.check_perms(8) and ndir_node.check_perms(2))` — confirm.
        if (not self.check_perms(8)) and ndir_node.check_perms(2):
            raise IOError(errno.EPERM, "Permission denied.")

        doc_obj = self.context._dirobj.pool.get('ir.attachment')
        if not fil_obj:
            dbro = doc_obj.browse(cr, self.context.uid, self.file_id, context=self.context.context)
        else:
            dbro = fil_obj
            assert dbro.id == self.file_id, "%s != %s for %r." % (dbro.id, self.file_id, self)

        if not dbro:
            raise IndexError("Cannot locate doc %d.", self.file_id)

        if (not self.parent):
            # there *must* be a parent node for this one
            self.parent = self.context.get_dir_node(cr, dbro.parent_id)
            assert self.parent

        ret = {}
        if ndir_node and self.parent != ndir_node:
            if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
                _logger.debug('Cannot move file %r from %r to %r.', self, self.parent, ndir_node)
                raise NotImplementedError('Cannot move files between dynamic folders.')
            if not ndir_obj:
                ndir_obj = self.context._dirobj.browse(cr, self.context.uid, \
                        ndir_node.dir_id, context=self.context.context)
            assert ndir_obj.id == ndir_node.dir_id
            r2 = {'parent_id': ndir_obj.id}
            ret.update(r2)

        if new_name and (new_name != dbro.name):
            if len(ret):
                raise NotImplementedError("Cannot rename and move.") # TODO
            r2 = {'name': new_name, 'datas_fname': new_name}
            ret.update(r2)

        del dbro
        if not in_write:
            # We have to update the data ourselves
            if ret:
                ctx = self.context.context.copy()
                ctx['__from_node'] = True
                doc_obj.write(cr, self.context.uid, [self.file_id,], ret, ctx)
            ret = True
        return ret
class node_content(node_class):
    """Node for virtual content (e.g. generated reports) inside a directory."""
    our_type = 'content'

    def __init__(self, path, parent, context, cnt, dctx=None, act_id=None):
        """Build from a ``document.directory.content`` browse record *cnt*."""
        super(node_content, self).__init__(path, parent, context)
        self.cnt_id = cnt.id
        self.create_date = False
        self.write_date = False
        self.content_length = False
        self.unixperms = 0640
        # Inherit ownership and a write-restricted perm mask from the parent.
        if parent:
            self.uidperms = parent.uidperms & 14
            self.uuser = parent.uuser
            self.ugroup = parent.ugroup

        self.extension = cnt.extension
        self.report_id = cnt.report_id and cnt.report_id.id
        #self.mimetype = cnt.extension.
        self.displayname = path
        if dctx:
            self.dctx.update(dctx)
        self.act_id = act_id

    def fill_fields(self, cr, dctx=None):
        """ Try to read the object and fill missing fields, like mimetype,
            dates etc.
            This function must be different from the constructor, because
            it uses the db cursor.
        """
        cr.execute('SELECT DISTINCT mimetype FROM document_directory_content_type WHERE active AND code = %s;',
                (self.extension,))
        res = cr.fetchall()
        if res and res[0][0]:
            self.mimetype = str(res[0][0])

    def get_data(self, cr, fil_obj=None):
        """Generate and return the content; needs read (4) permission."""
        cntobj = self.context._dirobj.pool.get('document.directory.content')
        if not self.check_perms(4):
            raise IOError(errno.EPERM, "Permission denied.")
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        data = cntobj.process_read(cr, self.context.uid, self, ctx)
        if data:
            self.content_length = len(data)
        return data

    def open_data(self, cr, mode):
        """Open a file-like descriptor; binary flag is stripped, mode checked."""
        if mode.endswith('b'):
            mode = mode[:-1]
        if mode in ('r', 'w'):
            cperms = mode[:1]
        elif mode in ('r+', 'w+'):
            cperms = 'rw'
        else:
            raise IOError(errno.EINVAL, "Cannot open at mode %s." % mode)

        if not self.check_perms(cperms):
            raise IOError(errno.EPERM, "Permission denied.")

        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        return nodefd_content(self, cr, mode, ctx)

    def get_data_len(self, cr, fil_obj=None):
        # FIXME : here, we actually generate the content twice!!
        # we should have cached the generated content, but it is
        # not advisable to do keep it in memory, until we have a cache
        # expiration logic.
        if not self.content_length:
            self.get_data(cr, fil_obj)
        return self.content_length

    def set_data(self, cr, data, fil_obj=None):
        """Write content through the directory-content handler (needs 2)."""
        cntobj = self.context._dirobj.pool.get('document.directory.content')
        if not self.check_perms(2):
            raise IOError(errno.EPERM, "Permission denied.")
        ctx = self.context.context.copy()
        ctx.update(self.dctx)
        return cntobj.process_write(cr, self.context.uid, self, data, ctx)

    def _get_ttag(self, cr):
        # Cache tag includes the action id when present.
        return 'cnt-%d%s' % (self.cnt_id, (self.act_id and ('-' + str(self.act_id))) or '')

    def get_dav_resourcetype(self, cr):
        # Plain (non-collection) resource for WebDAV.
        return ''
class node_descriptor(object):
    """A file-like interface to the data contents of a node.

    This class is NOT a node, but an /open descriptor/ for some
    node. It can hold references to a cursor or a file object,
    because the life of a node_descriptor will be the open period
    of the data.
    It should also take care of locking, with any native mechanism
    or using the db.
    For the implementation, it would be OK just to wrap around file,
    StringIO or similar class. The node_descriptor is only needed to
    provide the link to the parent /node/ object.
    """

    def __init__(self, parent):
        # Descriptors are only ever opened on real nodes.
        assert isinstance(parent, node_class)
        self.name = parent.displayname
        self.__parent = parent

    def _get_parent(self):
        """Return the node this descriptor was opened on."""
        return self.__parent

    def open(self, **kwargs):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError

    def read(self, size=None):
        raise NotImplementedError

    def seek(self, offset, whence=None):
        raise NotImplementedError

    def tell(self):
        raise NotImplementedError

    # Fixed: parameter was named `str`, shadowing the builtin.
    def write(self, data):
        raise NotImplementedError

    def size(self):
        raise NotImplementedError

    def __len__(self):
        return self.size()

    def __nonzero__(self):
        """ Ensure that a node_descriptor will never equal False
        Since we do define __len__ and __iter__ for us, we must avoid
        being regarded as non-true objects.
        """
        return True

    # Fixed: parameter was named `str`, shadowing the builtin.
    def next(self, data):
        raise NotImplementedError
class nodefd_content(StringIO, node_descriptor):
    """ A descriptor to content nodes
    """
    def __init__(self, parent, cr, mode, ctx):
        node_descriptor.__init__(self, parent)
        self._context = ctx
        self._size = 0L
        if mode in ('r', 'r+'):
            # Generate the content now so reads see a populated buffer.
            cntobj = parent.context._dirobj.pool.get('document.directory.content')
            data = cntobj.process_read(cr, parent.context.uid, parent, ctx)
            if data:
                self._size = len(data)
                parent.content_length = len(data)
            StringIO.__init__(self, data)
        elif mode in ('w', 'w+'):
            StringIO.__init__(self, None)
            # at write, we start at 0 (= overwrite), but have the original
            # data available, in case of a seek()
        elif mode == 'a':
            StringIO.__init__(self, None)
        else:
            _logger.error("Incorrect mode %s is specified.", mode)
            raise IOError(errno.EINVAL, "Invalid file mode.")
        self.mode = mode

    def size(self):
        return self._size

    def close(self):
        # we now open a *separate* cursor, to update the data.
        # FIXME: this may be improved, for concurrency handling
        if self.mode == 'r':
            StringIO.close(self)
            return
        par = self._get_parent()
        uid = par.context.uid

        cr = openerp.registry(par.context.dbname).cursor()
        try:
            if self.mode in ('w', 'w+', 'r+'):
                data = self.getvalue()
                cntobj = par.context._dirobj.pool.get('document.directory.content')
                cntobj.process_write(cr, uid, par, data, par.context.context)
            elif self.mode == 'a':
                # Append mode was never implemented for generated content.
                raise NotImplementedError
            cr.commit()
        except Exception:
            _logger.exception('Cannot update db content #%d for close.', par.cnt_id)
            raise
        finally:
            cr.close()
        StringIO.close(self)
class nodefd_static(StringIO, node_descriptor):
    """ A descriptor to nodes with static data.
    """
    def __init__(self, parent, cr, mode, ctx=None):
        node_descriptor.__init__(self, parent)
        self._context = ctx
        self._size = 0L
        if mode in ('r', 'r+'):
            # Preload the node's data so the buffer is readable immediately.
            data = parent.get_data(cr)
            if data:
                self._size = len(data)
                parent.content_length = len(data)
            StringIO.__init__(self, data)
        elif mode in ('w', 'w+'):
            StringIO.__init__(self, None)
            # at write, we start at 0 (= overwrite), but have the original
            # data available, in case of a seek()
        elif mode == 'a':
            StringIO.__init__(self, None)
        else:
            _logger.error("Incorrect mode %s is specified.", mode)
            raise IOError(errno.EINVAL, "Invalid file mode.")
        self.mode = mode

    def size(self):
        return self._size

    def close(self):
        # we now open a *separate* cursor, to update the data.
        # FIXME: this may be improved, for concurrency handling
        if self.mode == 'r':
            StringIO.close(self)
            return
        par = self._get_parent()
        # uid = par.context.uid
        cr = openerp.registry(par.context.dbname).cursor()
        try:
            if self.mode in ('w', 'w+', 'r+'):
                data = self.getvalue()
                par.set_data(cr, data)
            elif self.mode == 'a':
                raise NotImplementedError
            cr.commit()
        except Exception:
            # NOTE(review): logs par.cnt_id, but static parents may not carry a
            # cnt_id attribute — confirm this attribute exists on all callers.
            _logger.exception('Cannot update db content #%d for close.', par.cnt_id)
            raise
        finally:
            cr.close()
        StringIO.close(self)
class nodefd_db(StringIO, node_descriptor):
    """ A descriptor to db data
    """
    def __init__(self, parent, ira_browse, mode):
        node_descriptor.__init__(self, parent)
        self._size = 0L
        # Binary flag is irrelevant for a memory buffer; strip it.
        if mode.endswith('b'):
            mode = mode[:-1]

        if mode in ('r', 'r+'):
            # Attachment payload is stored base64-encoded in the db.
            data = ira_browse.datas
            if data:
                data = data.decode('base64')
                self._size = len(data)
            StringIO.__init__(self, data)
        elif mode in ('w', 'w+'):
            StringIO.__init__(self, None)
            # at write, we start at 0 (= overwrite), but have the original
            # data available, in case of a seek()
        elif mode == 'a':
            StringIO.__init__(self, None)
        else:
            _logger.error("Incorrect mode %s is specified.", mode)
            raise IOError(errno.EINVAL, "Invalid file mode.")
        self.mode = mode

    def size(self):
        return self._size

    def close(self):
        # we now open a *separate* cursor, to update the data.
        # FIXME: this may be improved, for concurrency handling
        # NOTE(review): unlike the other descriptors there is no early return
        # for mode 'r' — a cursor is opened and committed even on read-only
        # close; confirm whether that is intentional.
        par = self._get_parent()
        # uid = par.context.uid
        registry = openerp.modules.registry.RegistryManager.get(par.context.dbname)
        with registry.cursor() as cr:
            data = self.getvalue().encode('base64')
            if self.mode in ('w', 'w+', 'r+'):
                # uid 1 is the superuser — the write bypasses access rules.
                registry.get('ir.attachment').write(cr, 1, par.file_id, {'datas': data})
            cr.commit()
        StringIO.close(self)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| eLBati/odoo | addons/document/document.py | Python | agpl-3.0 | 83,826 |
#!/usr/bin/env python
# coding: utf-8
# Datasets
# ============================
#
# Datasets tell PHOEBE how and at what times to compute the model. In some cases these will include the actual observational data, and in other cases may only include the times at which you want to compute a synthetic model.
#
# Adding a dataset - even if it doesn't contain any observational data - is required in order to compute a synthetic model (which will be described in the [Compute Tutorial](compute.ipynb)).
#
# Setup
# -----------------------------
# Let's first make sure we have the latest version of PHOEBE 2.3 installed (uncomment this line if running in an online notebook session such as colab).
# In[1]:
#!pip install -I "phoebe>=2.3,<2.4"
# In[2]:
import phoebe
from phoebe import u # units

# Attach a default logger and build the default binary-star Bundle used below.
logger = phoebe.logger()
b = phoebe.default_binary()
# ## Adding a Dataset from Arrays
#
#
# To add a dataset, you need to provide the function in
# [phoebe.parameters.dataset](../api/phoebe.parameters.dataset.md) for the particular type of data you're dealing with, as well
# as any of your "observed" arrays.
#
# The current available methods include:
#
# * [lc](../api/phoebe.parameters.dataset.lc.md) light curves ([tutorial](LC.ipynb))
# * [rv](../api/phoebe.parameters.dataset.rv.md) radial velocity curves ([tutorial](RV.ipynb))
# * [lp](../api/phoebe.parameters.dataset.lp.md) spectral line profiles ([tutorial](LP.ipynb))
# * [orb](../api/phoebe.parameters.dataset.orb.md) orbit/positional data ([tutorial](ORB.ipynb))
# * [mesh](../api/phoebe.parameters.dataset.mesh.md) discretized mesh of stars ([tutorial](MESH.ipynb))
#
# which can always be listed via [phoebe.list_available_datasets](../api/phoebe.list_available_datasets.md)
# In[17]:
phoebe.list_available_datasets()
# ### Without Observations
#
# The simplest case of adding a dataset is when you do not have observational "data" and only want to compute a synthetic model. Here all you need to provide is an array of times and information about the type of data and how to compute it.
#
# Here we'll do just that - we'll add an orbit dataset which will track the positions and velocities of both our 'primary' and 'secondary' stars (by their component tags) at each of the provided times.
#
# Unlike other datasets, the mesh and orb dataset cannot accept actual observations, so there is no `times` parameter, only the `compute_times` and `compute_phases` parameters. For more details on these, see the [Advanced: Compute Times & Phases tutorial](compute_times_phases.ipynb).
# In[3]:
# Orbit dataset: track positions/velocities of both stars at 20 times in [0, 10].
b.add_dataset(phoebe.dataset.orb,
              compute_times=phoebe.linspace(0,10,20),
              dataset='orb01',
              component=['primary', 'secondary'])
# Here we used [phoebe.linspace](../api/phoebe.linspace.md). This is essentially just a shortcut to [np.linspace](https://numpy.org/doc/stable/reference/generated/numpy.linspace.html), but using [nparray](https://nparray.readthedocs.io) to allow these generated arrays to be serialized and stored easier within the Bundle. Other nparray constructor functions available at the top-level of PHOEBE include:
#
# * [phoebe.arange](../api/phoebe.arange.md)
# * [phoebe.invspace](../api/phoebe.invspace.md)
# * [phoebe.linspace](../api/phoebe.linspace.md)
# * [phoebe.logspace](../api/phoebe.logspace.md)
# * [phoebe.geomspace](../api/phoebe.geomspace.md)
#
# Any nparray object, list, or numpy array is acceptable as input to [FloatArrayParameters](../api/phoebe.parameters.FloatArrayParameter.md).
# [b.add_dataset](../api/phoebe.frontend.bundle.Bundle.add_dataset.md) can either take a function or the name of a function in [phoebe.parameters.dataset](../api/phoebe.parameters.dataset.md) as its first argument. The following line would do the same thing (and we'll pass `overwrite=True` to avoid the error of overwriting `dataset='orb01'`).
# In[4]:
# Same call, referring to the dataset kind by name; overwrite=True replaces
# the existing 'orb01' instead of raising.
b.add_dataset('orb',
              compute_times=phoebe.linspace(0,10,20),
              component=['primary', 'secondary'],
              dataset='orb01',
              overwrite=True)
# You may notice that `add_dataset` does take some time to complete. In the background, the passband is being loaded (when applicable) and many parameters are created and attached to the Bundle.
# If you do not provide a list of component(s), they will be assumed for you based on the dataset method. [LCs](LC.ipynb) (light curves) and [meshes](MESH.ipynb) can only attach at the system level (component=None), for instance, whereas [RVs](RV.ipynb) and [ORBs](ORB.ipynb) can attach for each star.
# In[5]:
# Without explicit components, RV attaches to every star in the system.
b.add_dataset('rv', times=phoebe.linspace(0,10,20), dataset='rv01')

# In[6]:

print(b.filter(qualifier='times', dataset='rv01').components)
# Here we added an RV dataset and can see that it was automatically created for both stars in our system. Under-the-hood, another entry is created for component='\_default'. The default parameters hold the values that will be replicated if a new component is added to the system in the future. In order to see these hidden parameters, you need to pass check_default=False to any filter-type call (and note that '\_default' is no longer exposed when calling `.components`). Also note that for set_value_all, this is automatically set to False.
#
# Since we did not explicitly state that we only wanted the primary and secondary components, the time array on '\_default' is filled as well. If we were then to add a tertiary component, its RVs would automatically be computed because of this replicated time array.
# In[7]:
# check_default=False exposes the hidden '_default' component entry as well.
print(b.filter(qualifier='times', dataset='rv01', check_default=False).components)

# In[8]:

print(b.get('times@_default@rv01', check_default=False))
# ### With Observations
#
# Loading datasets with observations is (nearly) as simple.
#
# Passing arrays to any of the dataset columns will apply it to all of the same components in which the time will be applied (see the 'Without Observations' section above for more details). This make perfect sense for fluxes in light curves where the time and flux arrays are both at the system level:
# In[9]:
# Light curve with observations: fluxes live at the system level.
b.add_dataset('lc', times=[0,1], fluxes=[1,0.5], dataset='lc01')

# In[10]:

print(b.get_parameter(qualifier='fluxes', dataset='lc01', context='dataset'))
# For datasets which attach to individual components, however, this isn't always the desired behavior.
#
# For a single-lined RV where we only attach to one component, everything is as expected.
# In[11]:
# Single-lined RV: observations attach only to the primary component.
b.add_dataset('rv',
              times=[0,1],
              rvs=[-3,3],
              component='primary',
              dataset='rv01',
              overwrite=True)

# In[12]:

print(b.get_parameter(qualifier='rvs', dataset='rv01', context='dataset'))
# However, for a double-lined RV we probably **don't** want to do the following:
# In[13]:
# Anti-example: without components the same rv array is copied to both stars.
b.add_dataset('rv',
              times=[0,0.5,1],
              rvs=[-3,3],
              dataset='rv02')

# In[14]:

print(b.filter(qualifier='rvs', dataset='rv02', context='dataset'))
# Instead we want to pass different arrays to the 'rvs@primary' and 'rvs@secondary'. This can be done by explicitly stating the components in a dictionary sent to that argument:
# In[15]:
# Correct double-lined RV: pass a per-component dict so each star gets its own rvs.
b.add_dataset('rv',
              times=[0,0.5,1],
              rvs={'primary': [-3,3], 'secondary': [4,-4]},
              dataset='rv02',
              overwrite=True)

# In[16]:

print(b.filter(qualifier='rvs', dataset='rv02', context='dataset'))
# Alternatively, you could of course not pass the values while calling add_dataset and instead call the [set_value](../api/phoebe.parameters.ParameterSet.set_value.md) method after and explicitly state the components at that time. For more details see the [add_dataset API docs](../api/phoebe.frontend.bundle.Bundle.add_dataset.md).
#
# PHOEBE doesn't come with any built-in file parsing, but you can use common file parsers such as [np.loadtxt](https://numpy.org/doc/stable/reference/generated/numpy.loadtxt.html) or [np.genfromtxt](https://numpy.org/doc/stable/reference/generated/numpy.genfromtxt.html) to extract arrays from an external data file.
# Dataset Types
# ------------------------
#
# For a full explanation of all related options and Parameter see the respective dataset tutorials:
#
# * [Light Curves/Fluxes (lc)](./LC.ipynb)
# * [Radial Velocities (rv)](./RV.ipynb)
# * [Line Profiles (lp)](./LP.ipynb)
# * [Orbits (orb)](./ORB.ipynb)
# * [Meshes (mesh)](./MESH.ipynb)
# Next
# ----------
#
# Next up: let's learn how to [compute observables](compute.ipynb) and create our first synthetic model.
#
# Or see some of these advanced topics:
#
# * [Advanced: Datasets (passband options, dealing with phases, removing datasets)](datasets_advanced.ipynb)
# * [Advanced: Compute Times & Phases](compute_times_phases.ipynb)
# In[ ]:
| phoebe-project/phoebe2-docs | development/tutorials/datasets.py | Python | gpl-3.0 | 8,874 |
import asyncio
import functools
import logging
import re
import signal
import sys
from unittest.mock import MagicMock
import msgpack
import pytest
from arq.connections import ArqRedis, RedisSettings
from arq.constants import abort_jobs_ss, default_queue_name, health_check_key_suffix, job_key_prefix
from arq.jobs import Job, JobStatus
from arq.worker import (
FailedJobs,
JobExecutionFailed,
Retry,
RetryJob,
Worker,
async_check_health,
check_health,
func,
run_worker,
)
async def foobar(ctx):
    """Trivial worker job used by the tests below; always resolves to 42."""
    answer = 42
    return answer
async def fails(ctx):
    """Worker job that always raises, to exercise failure handling."""
    message = 'my type error'
    raise TypeError(message)
def test_no_jobs(arq_redis: ArqRedis, loop, mocker):
    """run_worker in burst mode drains the queue and reports one completed job."""
    class Settings:
        functions = [func(foobar, name='foobar')]
        burst = True
        poll_delay = 0
        queue_read_limit = 10

    loop.run_until_complete(arq_redis.enqueue_job('foobar'))
    # run_worker grabs a loop via asyncio.get_event_loop; patch it to reuse ours.
    mocker.patch('asyncio.get_event_loop', lambda: loop)
    worker = run_worker(Settings)
    assert worker.jobs_complete == 1
    assert str(worker) == '<Worker j_complete=1 j_failed=0 j_retried=0 j_ongoing=0>'
def test_health_check_direct(loop):
    """With no health-check key present, check_health reports failure (1)."""
    Settings = type('Settings', (), {})
    asyncio.set_event_loop(loop)
    assert check_health(Settings) == 1
async def test_health_check_fails():
    """The async health check returns 1 (unhealthy) when no key exists."""
    result = await async_check_health(None)
    assert result == 1
async def test_health_check_pass(arq_redis):
    """A present health-check key makes the async check report success (0)."""
    key = default_queue_name + health_check_key_suffix
    await arq_redis.set(key, b'1')
    assert await async_check_health(None) == 0
async def test_set_health_check_key(arq_redis: ArqRedis, worker):
    """With keep_result=0, the custom health-check key is the only key left."""
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    worker: Worker = worker(functions=[func(foobar, keep_result=0)], health_check_key='arq:test:health-check')
    await worker.main()
    assert sorted(await arq_redis.keys('*')) == [b'arq:test:health-check']
async def test_handle_sig(caplog, arq_redis: ArqRedis):
    """handle_sig logs one shutdown summary and cancels only unfinished tasks."""
    caplog.set_level(logging.INFO)
    worker = Worker([foobar], redis_pool=arq_redis)
    worker.main_task = MagicMock()
    # Task 0 reports done, task 1 is still running.
    worker.tasks = {0: MagicMock(done=MagicMock(return_value=True)), 1: MagicMock(done=MagicMock(return_value=False))}

    assert len(caplog.records) == 0
    worker.handle_sig(signal.SIGINT)
    assert len(caplog.records) == 1
    assert caplog.records[0].message == (
        'shutdown on SIGINT ◆ 0 jobs complete ◆ 0 failed ◆ 0 retries ◆ 2 ongoing to cancel'
    )
    assert worker.main_task.cancel.call_count == 1
    assert worker.tasks[0].done.call_count == 1
    assert worker.tasks[0].cancel.call_count == 0  # finished task is not cancelled
    assert worker.tasks[1].done.call_count == 1
    assert worker.tasks[1].cancel.call_count == 1
async def test_handle_no_sig(caplog):
    """close() performs the same shutdown sequence when signal handling is off."""
    caplog.set_level(logging.INFO)
    worker = Worker([foobar], handle_signals=False)
    worker.main_task = MagicMock()
    # Task 0 reports done, task 1 is still running.
    worker.tasks = {0: MagicMock(done=MagicMock(return_value=True)), 1: MagicMock(done=MagicMock(return_value=False))}

    assert len(caplog.records) == 0
    await worker.close()
    assert len(caplog.records) == 1
    assert caplog.records[0].message == (
        'shutdown on SIGUSR1 ◆ 0 jobs complete ◆ 0 failed ◆ 0 retries ◆ 2 ongoing to cancel'
    )
    assert worker.main_task.cancel.call_count == 1
    assert worker.tasks[0].done.call_count == 1
    assert worker.tasks[0].cancel.call_count == 0  # finished task is not cancelled
    assert worker.tasks[1].done.call_count == 1
    assert worker.tasks[1].cancel.call_count == 1
async def test_job_successful(arq_redis: ArqRedis, worker, caplog):
    """A registered job runs once, succeeds, and logs start/finish lines."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    worker: Worker = worker(functions=[foobar])
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker.main()
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0

    # Normalise elapsed-time fragments so the log comparison is deterministic.
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'X.XXs → testing:foobar()\n X.XXs ← testing:foobar ● 42' in log
async def test_job_retry(arq_redis: ArqRedis, worker, caplog):
    """A job raising Retry twice succeeds on the third attempt."""
    async def retry(ctx):
        # Fail the first two tries, succeed on the third.
        if ctx['job_try'] <= 2:
            raise Retry(defer=0.01)

    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('retry', _job_id='testing')
    worker: Worker = worker(functions=[func(retry, name='retry')])
    await worker.main()
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 2

    # Normalise elapsed-time fragments so the log comparison is deterministic.
    log = re.sub(r'(\d+).\d\ds', r'\1.XXs', '\n'.join(r.message for r in caplog.records))
    assert '0.XXs ↻ testing:retry retrying job in 0.XXs\n' in log
    assert '0.XXs → testing:retry() try=2\n' in log
    assert '0.XXs ← testing:retry ●' in log
async def test_job_retry_dont_retry(arq_redis: ArqRedis, worker, caplog):
    """With retry_jobs=False a Retry exception fails the job immediately."""
    async def retry(ctx):
        raise Retry(defer=0.01)

    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('retry', _job_id='testing')
    worker: Worker = worker(functions=[func(retry, name='retry')])
    with pytest.raises(FailedJobs) as exc_info:
        await worker.run_check(retry_jobs=False)
    assert str(exc_info.value) == '1 job failed <Retry defer 0.01s>'

    # No retry marker should appear in the log, only the failure line.
    assert '↻' not in caplog.text
    assert '! testing:retry failed, Retry: <Retry defer 0.01s>\n' in caplog.text
async def test_job_retry_max_jobs(arq_redis: ArqRedis, worker, caplog):
    """max_burst_jobs=1 stops after the first retry; the job is not re-run."""
    async def retry(ctx):
        raise Retry(defer=0.01)

    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('retry', _job_id='testing')
    worker: Worker = worker(functions=[func(retry, name='retry')])
    assert await worker.run_check(max_burst_jobs=1) == 0
    assert worker.jobs_complete == 0
    assert worker.jobs_retried == 1
    assert worker.jobs_failed == 0

    # Normalise elapsed-time fragments so the log comparison is deterministic.
    log = re.sub(r'(\d+).\d\ds', r'\1.XXs', caplog.text)
    assert '0.XXs ↻ testing:retry retrying job in 0.XXs\n' in log
    assert '0.XXs → testing:retry() try=2\n' not in log
async def test_job_job_not_found(arq_redis: ArqRedis, worker, caplog):
    """Enqueueing an unregistered function name fails the job with a log entry."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('missing', _job_id='testing')
    worker: Worker = worker(functions=[foobar])
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0

    # Normalise elapsed-time fragments so the log comparison is deterministic.
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert "job testing, function 'missing' not found" in log
async def test_job_job_not_found_run_check(arq_redis: ArqRedis, worker, caplog):
    """run_check raises FailedJobs carrying a JobExecutionFailed result."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('missing', _job_id='testing')
    worker: Worker = worker(functions=[foobar])
    with pytest.raises(FailedJobs) as exc_info:
        await worker.run_check()

    assert exc_info.value.count == 1
    assert len(exc_info.value.job_results) == 1
    failure = exc_info.value.job_results[0].result
    assert failure == JobExecutionFailed("function 'missing' not found")
    assert failure != 123  # check the __eq__ method of JobExecutionFailed
async def test_retry_lots(arq_redis: ArqRedis, worker, caplog):
    """A job that retries forever is failed after the default 5 retries."""
    async def retry(ctx):
        raise Retry()

    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('retry', _job_id='testing')
    worker: Worker = worker(functions=[func(retry, name='retry')])
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 5

    # Normalise elapsed-time fragments so the log comparison is deterministic.
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert ' X.XXs ! testing:retry max retries 5 exceeded' in log
async def test_retry_lots_without_keep_result(arq_redis: ArqRedis, worker):
    """Exhausting retries with keep_result=0 must not crash the worker."""
    async def retry(ctx):
        raise Retry()

    await arq_redis.enqueue_job('retry', _job_id='testing')
    retry_worker = worker(functions=[func(retry, name='retry')], keep_result=0)
    await retry_worker.main()  # Should not raise MultiExecError
async def test_retry_lots_check(arq_redis: ArqRedis, worker, caplog):
    """run_check surfaces a FailedJobs error once max retries are exceeded."""
    async def retry(ctx):
        raise Retry()

    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('retry', _job_id='testing')
    check_worker = worker(functions=[func(retry, name='retry')])
    with pytest.raises(FailedJobs, match='max 5 retries exceeded'):
        await check_worker.run_check()
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='3.8 deals with CancelledError differently')
async def test_cancel_error(arq_redis: ArqRedis, worker, caplog):
    """Pre-3.8: a CancelledError from the job triggers a re-run, not a failure."""
    async def retry(ctx):
        # Cancel on the first try only; second try succeeds.
        if ctx['job_try'] == 1:
            raise asyncio.CancelledError()

    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('retry', _job_id='testing')
    worker: Worker = worker(functions=[func(retry, name='retry')])
    await worker.main()
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 1

    # Normalise elapsed-time fragments so the log comparison is deterministic.
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'X.XXs ↻ testing:retry cancelled, will be run again' in log
async def test_retry_job_error(arq_redis: ArqRedis, worker, caplog):
    """RetryJob behaves like a cancellation: the job is re-run once."""
    async def retry(ctx):
        # Request a retry on the first try only; second try succeeds.
        if ctx['job_try'] == 1:
            raise RetryJob()

    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('retry', _job_id='testing')
    worker: Worker = worker(functions=[func(retry, name='retry')])
    await worker.main()
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 1

    # Normalise elapsed-time fragments so the log comparison is deterministic.
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'X.XXs ↻ testing:retry cancelled, will be run again' in log
async def test_job_expired(arq_redis: ArqRedis, worker, caplog):
    """Deleting the job key before the worker picks it up makes the job fail as expired."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    await arq_redis.delete(job_key_prefix + 'testing')
    worker: Worker = worker(functions=[foobar])
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'job testing expired' in log
async def test_job_expired_run_check(arq_redis: ArqRedis, worker, caplog):
    """run_check() reports an expired job as a single JobExecutionFailed result."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    await arq_redis.delete(job_key_prefix + 'testing')
    worker: Worker = worker(functions=[foobar])
    with pytest.raises(FailedJobs) as exc_info:
        await worker.run_check()
    # exception repr differs between Python versions (trailing comma in 3.6)
    assert str(exc_info.value) in {
        "1 job failed JobExecutionFailed('job expired',)", # python 3.6
        "1 job failed JobExecutionFailed('job expired')", # python 3.7
    }
    assert exc_info.value.count == 1
    assert len(exc_info.value.job_results) == 1
    assert exc_info.value.job_results[0].result == JobExecutionFailed('job expired')
async def test_job_old(arq_redis: ArqRedis, worker, caplog):
    """A job deferred into the past still runs, and the log records how delayed it was."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foobar', _job_id='testing', _defer_by=-2)
    worker: Worker = worker(functions=[foobar])
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker.main()
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    # keep the integer part of timings, mask only the fractional digits
    log = re.sub(r'(\d+).\d\ds', r'\1.XXs', '\n'.join(r.message for r in caplog.records))
    assert log.endswith(' 0.XXs → testing:foobar() delayed=2.XXs\n' ' 0.XXs ← testing:foobar ● 42')
async def test_retry_repr():
    """Retry's str() includes the defer delay with two decimal places."""
    assert str(Retry(123)) == '<Retry defer 123.00s>'
async def test_str_function(arq_redis: ArqRedis, worker, caplog):
    """Worker functions may be given as dotted-path strings; here the call fails with TypeError."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('asyncio.sleep', _job_id='testing')
    worker: Worker = worker(functions=['asyncio.sleep'])
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
    log = re.sub(r'(\d+).\d\ds', r'\1.XXs', '\n'.join(r.message for r in caplog.records))
    assert '0.XXs ! testing:asyncio.sleep failed, TypeError' in log
async def test_startup_shutdown(arq_redis: ArqRedis, worker):
    """on_startup and on_shutdown hooks each run exactly once, in order."""
    calls = []
    async def startup(ctx):
        calls.append('startup')
    async def shutdown(ctx):
        calls.append('shutdown')
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    worker: Worker = worker(functions=[foobar], on_startup=startup, on_shutdown=shutdown)
    await worker.main()
    await worker.close()
    assert calls == ['startup', 'shutdown']
class CustomError(RuntimeError):
    """Error type exposing an ``extra()`` payload, used by test_exc_extra."""

    def extra(self):
        # the worker is expected to attach this dict to the ERROR log record
        return dict(x='y')
async def error_function(ctx):
    """Job body that always fails with CustomError (exercised by test_exc_extra)."""
    raise CustomError('this is the error')
async def test_exc_extra(arq_redis: ArqRedis, worker, caplog):
    """If a job exception defines extra(), its dict ends up on the ERROR log record."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('error_function', _job_id='testing')
    worker: Worker = worker(functions=[error_function])
    await worker.main()
    assert worker.jobs_failed == 1
    log = re.sub(r'(\d+).\d\ds', r'\1.XXs', '\n'.join(r.message for r in caplog.records))
    assert '0.XXs ! testing:error_function failed, CustomError: this is the error' in log
    error = next(r for r in caplog.records if r.levelno == logging.ERROR)
    assert error.extra == {'x': 'y'}
async def test_unpickleable(arq_redis: ArqRedis, worker, caplog):
    """A result that cannot be serialized is logged as a serialization error."""
    caplog.set_level(logging.INFO)
    class Foo:
        pass
    async def example(ctx):
        # locally-defined classes can't be pickled, so storing this result must fail
        return Foo()
    await arq_redis.enqueue_job('example', _job_id='testing')
    worker: Worker = worker(functions=[func(example, name='example')])
    await worker.main()
    log = re.sub(r'(\d+).\d\ds', r'\1.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'error serializing result of testing:example' in log
async def test_log_health_check(arq_redis: ArqRedis, worker, caplog):
    """With health_check_interval=0 the worker logs job counts and records health."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    worker: Worker = worker(functions=[foobar], health_check_interval=0)
    await worker.main()
    await worker.main()
    await worker.main()
    assert worker.jobs_complete == 1
    assert 'j_complete=1 j_failed=0 j_retried=0 j_ongoing=0 queued=0' in caplog.text
    # assert log.count('recording health') == 1 can happen more than once due to redis pool size
    assert 'recording health' in caplog.text
async def test_remain_keys(arq_redis: ArqRedis, worker, create_pool):
    """After a run only health-check and result keys remain; close() drops the health key."""
    redis2 = await create_pool(RedisSettings())
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    assert sorted(await redis2.keys('*')) == [b'arq:job:testing', b'arq:queue']
    worker: Worker = worker(functions=[foobar])
    await worker.main()
    assert sorted(await redis2.keys('*')) == [b'arq:queue:health-check', b'arq:result:testing']
    await worker.close()
    assert sorted(await redis2.keys('*')) == [b'arq:result:testing']
async def test_remain_keys_no_results(arq_redis: ArqRedis, worker):
    """keep_result=0 on the function means no result key is stored."""
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    assert sorted(await arq_redis.keys('*')) == [b'arq:job:testing', b'arq:queue']
    worker: Worker = worker(functions=[func(foobar, keep_result=0)])
    await worker.main()
    assert sorted(await arq_redis.keys('*')) == [b'arq:queue:health-check']
async def test_remain_keys_keep_results_forever_in_function(arq_redis: ArqRedis, worker):
    """keep_result_forever on the function stores the result with no TTL (-1)."""
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    assert sorted(await arq_redis.keys('*')) == [b'arq:job:testing', b'arq:queue']
    worker: Worker = worker(functions=[func(foobar, keep_result_forever=True)])
    await worker.main()
    assert sorted(await arq_redis.keys('*')) == [b'arq:queue:health-check', b'arq:result:testing']
    ttl_result = await arq_redis.ttl('arq:result:testing')
    assert ttl_result == -1
async def test_remain_keys_keep_results_forever(arq_redis: ArqRedis, worker):
    """keep_result_forever on the worker stores the result with no TTL (-1)."""
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    assert sorted(await arq_redis.keys('*')) == [b'arq:job:testing', b'arq:queue']
    worker: Worker = worker(functions=[func(foobar)], keep_result_forever=True)
    await worker.main()
    assert sorted(await arq_redis.keys('*')) == [b'arq:queue:health-check', b'arq:result:testing']
    ttl_result = await arq_redis.ttl('arq:result:testing')
    assert ttl_result == -1
async def test_run_check_passes(arq_redis: ArqRedis, worker):
    """run_check() returns the number of jobs when all succeed."""
    await arq_redis.enqueue_job('foobar')
    await arq_redis.enqueue_job('foobar')
    worker: Worker = worker(functions=[func(foobar, name='foobar')])
    assert 2 == await worker.run_check()
async def test_run_check_error(arq_redis: ArqRedis, worker):
    """run_check() raises FailedJobs describing a single failure."""
    await arq_redis.enqueue_job('fails')
    worker: Worker = worker(functions=[func(fails, name='fails')])
    with pytest.raises(FailedJobs, match=r"1 job failed TypeError\('my type error'"):
        await worker.run_check()
async def test_run_check_error2(arq_redis: ArqRedis, worker):
    """run_check() aggregates multiple failures into one FailedJobs."""
    await arq_redis.enqueue_job('fails')
    await arq_redis.enqueue_job('fails')
    worker: Worker = worker(functions=[func(fails, name='fails')])
    with pytest.raises(FailedJobs, match='2 jobs failed:\n') as exc_info:
        await worker.run_check()
    assert len(exc_info.value.job_results) == 2
async def test_keep_result_ms(arq_redis: ArqRedis, worker):
    """A fractional keep_result (seconds) is accepted and the job still completes."""
    async def return_something(ctx):
        return 1
    await arq_redis.enqueue_job('return_something')
    worker: Worker = worker(functions=[func(return_something, name='return_something')], keep_result=3600.15)
    await worker.main()
    assert (worker.jobs_complete, worker.jobs_failed, worker.jobs_retried) == (1, 0, 0)
async def test_return_exception(arq_redis: ArqRedis, worker):
    """Returning (not raising) an exception counts as success and is stored as the result."""
    async def return_error(ctx):
        return TypeError('xxx')
    j = await arq_redis.enqueue_job('return_error')
    worker: Worker = worker(functions=[func(return_error, name='return_error')])
    await worker.main()
    assert (worker.jobs_complete, worker.jobs_failed, worker.jobs_retried) == (1, 0, 0)
    r = await j.result(poll_delay=0)
    assert isinstance(r, TypeError)
    info = await j.result_info()
    assert info.success is True
async def test_error_success(arq_redis: ArqRedis, worker):
    """A raised exception marks the stored result info as unsuccessful."""
    j = await arq_redis.enqueue_job('fails')
    worker: Worker = worker(functions=[func(fails, name='fails')])
    await worker.main()
    assert (worker.jobs_complete, worker.jobs_failed, worker.jobs_retried) == (0, 1, 0)
    info = await j.result_info()
    assert info.success is False
async def test_many_jobs_expire(arq_redis: ArqRedis, worker, caplog):
    """Queue entries with no matching job key all expire; the one real job still runs."""
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foobar')
    # 100 queue entries with no corresponding arq:job:* key -> all count as expired
    await asyncio.gather(*[arq_redis.zadd(default_queue_name, {f'testing-{i}': 1}) for i in range(100)])
    worker: Worker = worker(functions=[foobar])
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker.main()
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 100
    assert worker.jobs_retried == 0
    log = '\n'.join(r.message for r in caplog.records)
    assert 'job testing-0 expired' in log
    assert log.count(' expired') == 100
async def test_repeat_job_result(arq_redis: ArqRedis, worker):
    """Enqueueing a duplicate job id returns None, both while queued and once complete."""
    j1 = await arq_redis.enqueue_job('foobar', _job_id='job_id')
    assert isinstance(j1, Job)
    assert await j1.status() == JobStatus.queued
    assert await arq_redis.enqueue_job('foobar', _job_id='job_id') is None
    await worker(functions=[foobar]).run_check()
    assert await j1.status() == JobStatus.complete
    assert await arq_redis.enqueue_job('foobar', _job_id='job_id') is None
async def test_queue_read_limit_equals_max_jobs(arq_redis: ArqRedis, worker):
    """With queue_read_limit=2, each poll iteration takes at most two jobs off the queue."""
    for _ in range(4):
        await arq_redis.enqueue_job('foobar')
    assert await arq_redis.zcard(default_queue_name) == 4
    worker: Worker = worker(functions=[foobar], queue_read_limit=2)
    assert worker.queue_read_limit == 2
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker._poll_iteration()
    # give the spawned job tasks a moment to finish
    await asyncio.sleep(0.1)
    assert await arq_redis.zcard(default_queue_name) == 2
    assert worker.jobs_complete == 2
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker._poll_iteration()
    await asyncio.sleep(0.1)
    assert await arq_redis.zcard(default_queue_name) == 0
    assert worker.jobs_complete == 4
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
async def test_queue_read_limit_calc(worker):
    """Default queue_read_limit appears to be max(max_jobs * 5, 100) — inferred from the cases below."""
    assert worker(functions=[foobar], queue_read_limit=2, max_jobs=1).queue_read_limit == 2
    assert worker(functions=[foobar], queue_read_limit=200, max_jobs=1).queue_read_limit == 200
    assert worker(functions=[foobar], max_jobs=18).queue_read_limit == 100
    assert worker(functions=[foobar], max_jobs=22).queue_read_limit == 110
async def test_custom_queue_read_limit(arq_redis: ArqRedis, worker):
    """A queue_read_limit smaller than max_jobs still caps jobs fetched per poll."""
    for _ in range(4):
        await arq_redis.enqueue_job('foobar')
    assert await arq_redis.zcard(default_queue_name) == 4
    worker: Worker = worker(functions=[foobar], max_jobs=4, queue_read_limit=2)
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker._poll_iteration()
    await asyncio.sleep(0.1)
    assert await arq_redis.zcard(default_queue_name) == 2
    assert worker.jobs_complete == 2
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker._poll_iteration()
    await asyncio.sleep(0.1)
    assert await arq_redis.zcard(default_queue_name) == 0
    assert worker.jobs_complete == 4
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
async def test_custom_serializers(arq_redis_msgpack: ArqRedis, worker):
    """Jobs round-trip correctly when both client and worker use msgpack serialization."""
    j = await arq_redis_msgpack.enqueue_job('foobar', _job_id='job_id')
    worker: Worker = worker(
        functions=[foobar], job_serializer=msgpack.packb, job_deserializer=functools.partial(msgpack.unpackb, raw=False)
    )
    info = await j.info()
    assert info.function == 'foobar'
    assert await worker.run_check() == 1
    assert await j.result() == 42
    r = await j.info()
    assert r.result == 42
class UnpickleFails:
    """Pickles successfully but always raises ValueError when unpickled."""

    def __init__(self, v):
        self.v = v

    def __setstate__(self, state):
        # deserialization is made deliberately impossible
        raise ValueError('this broke')
@pytest.mark.skipif(sys.version_info < (3, 7), reason='repr(exc) is ugly in 3.6')
async def test_deserialization_error(arq_redis: ArqRedis, worker):
    """A job whose argument can't be unpickled fails with DeserializationError."""
    await arq_redis.enqueue_job('foobar', UnpickleFails('hello'), _job_id='job_id')
    worker: Worker = worker(functions=[foobar])
    with pytest.raises(FailedJobs) as exc_info:
        await worker.run_check()
    assert str(exc_info.value) == "1 job failed DeserializationError('unable to deserialize job')"
async def test_incompatible_serializers_1(arq_redis_msgpack: ArqRedis, worker):
    """A msgpack-enqueued job fails on a worker using the default deserializer."""
    await arq_redis_msgpack.enqueue_job('foobar', _job_id='job_id')
    worker: Worker = worker(functions=[foobar])
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
async def test_incompatible_serializers_2(arq_redis: ArqRedis, worker):
    """A default-enqueued job fails on a worker configured for msgpack."""
    await arq_redis.enqueue_job('foobar', _job_id='job_id')
    worker: Worker = worker(
        functions=[foobar], job_serializer=msgpack.packb, job_deserializer=functools.partial(msgpack.unpackb, raw=False)
    )
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
async def test_max_jobs_completes(arq_redis: ArqRedis, worker):
    """With max_burst_jobs=3, one success and two failures are all reported."""
    v = 0
    async def raise_second_time(ctx):
        # succeeds on the first call, raises on every later one
        nonlocal v
        v += 1
        if v > 1:
            raise ValueError('xxx')
    await arq_redis.enqueue_job('raise_second_time')
    await arq_redis.enqueue_job('raise_second_time')
    await arq_redis.enqueue_job('raise_second_time')
    worker: Worker = worker(functions=[func(raise_second_time, name='raise_second_time')])
    with pytest.raises(FailedJobs) as exc_info:
        await worker.run_check(max_burst_jobs=3)
    assert repr(exc_info.value).startswith('<2 jobs failed:')
async def test_max_bursts_sub_call(arq_redis: ArqRedis, worker, caplog):
    """A job may enqueue a follow-up job; with max_burst_jobs=1 only the first completes."""
    async def foo(ctx, v):
        return v + 1
    async def bar(ctx, v):
        await ctx['redis'].enqueue_job('foo', v + 1)
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('bar', 10)
    worker: Worker = worker(functions=[func(foo, name='foo'), func(bar, name='bar')])
    assert await worker.run_check(max_burst_jobs=1) == 1
    assert worker.jobs_complete == 1
    assert worker.jobs_retried == 0
    assert worker.jobs_failed == 0
    assert 'bar(10)' in caplog.text
    assert 'foo' in caplog.text
async def test_max_bursts_multiple(arq_redis: ArqRedis, worker, caplog):
    """max_burst_jobs=1 processes only the first of two queued jobs."""
    async def foo(ctx, v):
        return v + 1
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foo', 1)
    await arq_redis.enqueue_job('foo', 2)
    worker: Worker = worker(functions=[func(foo, name='foo')])
    assert await worker.run_check(max_burst_jobs=1) == 1
    assert worker.jobs_complete == 1
    assert worker.jobs_retried == 0
    assert worker.jobs_failed == 0
    assert 'foo(1)' in caplog.text
    assert 'foo(2)' not in caplog.text
async def test_max_bursts_dont_get(arq_redis: ArqRedis, worker):
    """With max_burst_jobs=0 a poll iteration starts no job tasks at all."""
    async def foo(ctx, v):
        return v + 1
    await arq_redis.enqueue_job('foo', 1)
    await arq_redis.enqueue_job('foo', 2)
    worker: Worker = worker(functions=[func(foo, name='foo')])
    worker.max_burst_jobs = 0
    assert len(worker.tasks) == 0
    await worker._poll_iteration()
    assert len(worker.tasks) == 0
async def test_non_burst(arq_redis: ArqRedis, worker, caplog, loop):
    """In non-burst mode main() keeps running; it is cancelled after the job completes."""
    async def foo(ctx, v):
        return v + 1
    caplog.set_level(logging.INFO)
    await arq_redis.enqueue_job('foo', 1, _job_id='testing')
    worker: Worker = worker(functions=[func(foo, name='foo')])
    worker.burst = False
    t = loop.create_task(worker.main())
    # give the long-running main loop time to process the single job
    await asyncio.sleep(0.1)
    t.cancel()
    assert worker.jobs_complete == 1
    assert worker.jobs_retried == 0
    assert worker.jobs_failed == 0
    assert '← testing:foo ● 2' in caplog.text
async def test_multi_exec(arq_redis: ArqRedis, worker, caplog):
    """Starting the same job concurrently from five coroutines runs it exactly once."""
    c = 0
    async def foo(ctx, v):
        nonlocal c
        c += 1
        return v + 1
    caplog.set_level(logging.DEBUG, logger='arq.worker')
    await arq_redis.enqueue_job('foo', 1, _job_id='testing')
    worker: Worker = worker(functions=[func(foo, name='foo')])
    await asyncio.gather(*[worker.start_jobs([b'testing']) for _ in range(5)])
    # debug(caplog.text)
    await worker.main()
    assert c == 1
    # assert 'multi-exec error, job testing already started elsewhere' in caplog.text
    # assert 'WatchVariableError' not in caplog.text
async def test_abort_job(arq_redis: ArqRedis, worker, caplog, loop):
    """Aborting a running job marks it failed and leaves no task bookkeeping behind."""
    async def longfunc(ctx):
        await asyncio.sleep(3600)
    async def wait_and_abort(job, delay=0.1):
        await asyncio.sleep(delay)
        assert await job.abort() is True
    caplog.set_level(logging.INFO)
    # pre-existing unrelated entry in the abort sorted set must not interfere
    await arq_redis.zadd(abort_jobs_ss, {b'foobar': int(1e9)})
    job = await arq_redis.enqueue_job('longfunc', _job_id='testing')
    worker: Worker = worker(functions=[func(longfunc, name='longfunc')], allow_abort_jobs=True, poll_delay=0.1)
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await asyncio.gather(wait_and_abort(job), worker.main())
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'X.XXs → testing:longfunc()\n X.XXs ⊘ testing:longfunc aborted' in log
    assert worker.aborting_tasks == set()
    assert worker.tasks == {}
    assert worker.job_tasks == {}
async def test_abort_job_before(arq_redis: ArqRedis, worker, caplog, loop):
    """Aborting before the worker starts the job prevents it from running at all."""
    async def longfunc(ctx):
        await asyncio.sleep(3600)
    caplog.set_level(logging.INFO)
    job = await arq_redis.enqueue_job('longfunc', _job_id='testing')
    worker: Worker = worker(functions=[func(longfunc, name='longfunc')], allow_abort_jobs=True, poll_delay=0.1)
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    # abort with timeout=0 cannot wait for the result, so it times out
    with pytest.raises(asyncio.TimeoutError):
        await job.abort(timeout=0)
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'X.XXs ⊘ testing:longfunc aborted before start' in log
    await worker.main()
    assert worker.aborting_tasks == set()
    assert worker.job_tasks == {}
    assert worker.tasks == {}
async def test_not_abort_job(arq_redis: ArqRedis, worker, caplog, loop):
    """abort() returns False when the worker doesn't allow aborts; the job completes normally."""
    async def shortfunc(ctx):
        await asyncio.sleep(0.2)
    async def wait_and_abort(job, delay=0.1):
        await asyncio.sleep(delay)
        assert await job.abort() is False
    caplog.set_level(logging.INFO)
    job = await arq_redis.enqueue_job('shortfunc', _job_id='testing')
    worker: Worker = worker(functions=[func(shortfunc, name='shortfunc')], poll_delay=0.1)
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await asyncio.gather(wait_and_abort(job), worker.main())
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'X.XXs → testing:shortfunc()\n X.XXs ← testing:shortfunc ●' in log
    await worker.main()
    assert worker.aborting_tasks == set()
    assert worker.tasks == {}
    assert worker.job_tasks == {}
async def test_job_timeout(arq_redis: ArqRedis, worker, caplog):
    """A job exceeding job_timeout fails with TimeoutError."""
    async def longfunc(ctx):
        await asyncio.sleep(0.3)
    caplog.set_level(logging.ERROR)
    await arq_redis.enqueue_job('longfunc', _job_id='testing')
    worker: Worker = worker(functions=[func(longfunc, name='longfunc')], job_timeout=0.2, poll_delay=0.1)
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
    log = re.sub(r'\d+.\d\ds', 'X.XXs', '\n'.join(r.message for r in caplog.records))
    assert 'X.XXs ! testing:longfunc failed, TimeoutError:' in log
async def test_on_job(arq_redis: ArqRedis, worker):
    """on_job_start and on_job_end hooks each run once, with the job id in ctx."""
    result = {'called': 0}
    async def on_start(ctx):
        assert ctx['job_id'] == 'testing'
        result['called'] += 1
    async def on_end(ctx):
        assert ctx['job_id'] == 'testing'
        result['called'] += 1
    async def test(ctx):
        return
    await arq_redis.enqueue_job('func', _job_id='testing')
    worker: Worker = worker(
        functions=[func(test, name='func')],
        on_job_start=on_start,
        on_job_end=on_end,
        job_timeout=0.2,
        poll_delay=0.1,
    )
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    assert result['called'] == 0
    await worker.main()
    assert worker.jobs_complete == 1
    assert worker.jobs_failed == 0
    assert worker.jobs_retried == 0
    assert result['called'] == 2
| samuelcolvin/arq | tests/test_worker.py | Python | mit | 31,932 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from hyputils.memex.util import markdown
class TestRender(object):
def test_it_renders_markdown(self):
actual = markdown.render("_emphasis_ **bold**")
assert "<p><em>emphasis</em> <strong>bold</strong></p>\n" == actual
def test_it_ignores_math_block(self):
actual = markdown.render("$$1 + 1 = 2$$")
assert "<p>$$1 + 1 = 2$$</p>\n" == actual
def test_it_ignores_inline_match(self):
actual = markdown.render(r"Foobar \(1 + 1 = 2\)")
assert "<p>Foobar \\(1 + 1 = 2\\)</p>\n" == actual
def test_it_sanitizes_the_output(self, markdown_render, sanitize):
markdown.render("foobar")
sanitize.assert_called_once_with(markdown_render.return_value)
@pytest.fixture
def markdown_render(self, patch):
return patch("hyputils.memex.util.markdown.markdown")
@pytest.fixture
def sanitize(self, patch):
return patch("hyputils.memex.util.markdown.sanitize")
class TestSanitize(object):
@pytest.mark.parametrize(
"text,expected",
[
(
'<a href="https://example.org">example</a>',
'<a href="https://example.org" rel="nofollow noopener" target="_blank">example</a>',
),
# Don't add rel and target attrs to mailto: links
('<a href="mailto:foo@example.net">example</a>', None),
('<a title="foobar">example</a>', None),
(
'<a href="https://example.org" rel="nofollow noopener" target="_blank" title="foobar">example</a>',
None,
),
("<blockquote>Foobar</blockquote>", None),
("<code>foobar</code>", None),
("<em>foobar</em>", None),
("<hr>", None),
("<h1>foobar</h1>", None),
("<h2>foobar</h2>", None),
("<h3>foobar</h3>", None),
("<h4>foobar</h4>", None),
("<h5>foobar</h5>", None),
("<h6>foobar</h6>", None),
('<img src="http://example.com/img.jpg">', None),
('<img src="/img.jpg">', None),
('<img alt="foobar" src="/img.jpg">', None),
('<img src="/img.jpg" title="foobar">', None),
('<img alt="hello" src="/img.jpg" title="foobar">', None),
("<ol><li>foobar</li></ol>", None),
("<p>foobar</p>", None),
("<pre>foobar</pre>", None),
("<strong>foobar</strong>", None),
("<ul><li>foobar</li></ul>", None),
],
)
def test_it_allows_markdown_html(self, text, expected):
if expected is None:
expected = text
assert markdown.sanitize(text) == expected
@pytest.mark.parametrize(
"text,expected",
[
("<script>evil()</script>", "<script>evil()</script>"),
(
'<a href="#" onclick="evil()">foobar</a>',
'<a href="#" rel="nofollow noopener" target="_blank">foobar</a>',
),
(
'<a href="#" onclick=evil()>foobar</a>',
'<a href="#" rel="nofollow noopener" target="_blank">foobar</a>',
),
("<a href=\"javascript:alert('evil')\">foobar</a>", "<a>foobar</a>"),
('<img src="/evil.jpg" onclick="evil()">', '<img src="/evil.jpg">'),
("<img src=\"javascript:alert('evil')\">", "<img>"),
],
)
def test_it_escapes_evil_html(self, text, expected):
assert markdown.sanitize(text) == expected
def test_it_adds_target_blank_and_rel_nofollow_to_links(self):
actual = markdown.sanitize('<a href="https://example.org">Hello</a>')
expected = '<a href="https://example.org" rel="nofollow noopener" target="_blank">Hello</a>'
assert actual == expected
| tgbugs/hypush | test/memex/util/markdown_test.py | Python | mit | 3,893 |
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License GPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from odoo.tests.common import TransactionCase
from odoo import fields
class TestMedicalPatientDisease(TransactionCase):
def setUp(self):
super(TestMedicalPatientDisease, self).setUp()
self.disease_1 = self.env.ref(
'medical_patient_disease.medical_patient_disease_disease_1'
)
self.disease_2 = self.env.ref(
'medical_patient_disease.medical_patient_disease_disease_2'
)
def test_name_without_short_comment(self):
""" Test name without short_comment present """
self.assertEquals(
self.disease_1.name, 'Malaria - Cause of death',
'Name should include short_comment.\rGot: %s\rExpected: %s' % (
self.disease_1.name, 'Malaria - Cause of death'
)
)
def test_name_with_short_comment(self):
""" Test name if short_comment present """
self.assertEquals(
self.disease_2.name, 'HIV/AIDS',
'Name should include short_comment.\rGot: %s\rExpected: %s' % (
self.disease_2.name, 'HIV/AIDS'
)
)
def test_compute_date_expire_active_disease(self):
""" Test date_expire is False if disease is active """
self.disease_2.active = True
self.assertFalse(
self.disease_2.date_expire,
'date_expire should be False.\rGot: %s\rExpected: %s' % (
self.disease_2.date_expire, False
)
)
def test_compute_date_expire_not_active_disease(self):
""" Test date_expire is Datetime.now() if disease not active """
self.disease_2.active = False
self.assertEquals(
self.disease_2.date_expire, fields.Datetime.now(),
'date_expire should be Datetime.now().\rGot: %s\rExpected: %s' % (
self.disease_2.date_expire, fields.Datetime.now()
)
)
def test_action_invalidate(self):
""" Test disease active field is False on invalidation """
self.disease_2.active = True
self.disease_2.action_invalidate()
self.assertFalse(
self.disease_2.active,
'Partner should be reactivated.\rGot: %s\rExpected: %s' % (
self.disease_2.active, False
)
)
def test_action_revalidate(self):
""" Test disease active field is True on revalidation """
self.disease_2.active = False
self.disease_2.action_revalidate()
self.assertTrue(
self.disease_2.active,
'Disease should be reactivated.\rGot: %s\rExpected: %s' % (
self.disease_2.active, True
)
)
| laslabs/vertical-medical | medical_patient_disease/tests/test_medical_patient_disease.py | Python | agpl-3.0 | 2,806 |
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import unittest
import doctest
import trytond.tests.test_tryton
from trytond.tests.test_tryton import ModuleTestCase
from trytond.tests.test_tryton import doctest_setup, doctest_teardown
class AccountStockContinentalTestCase(ModuleTestCase):
'Test Account Stock Continental module'
module = 'account_stock_continental'
def suite():
suite = trytond.tests.test_tryton.suite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(
AccountStockContinentalTestCase))
suite.addTests(doctest.DocFileSuite(
'scenario_account_stock_continental.rst',
setUp=doctest_setup, tearDown=doctest_teardown, encoding='utf-8',
optionflags=doctest.REPORT_ONLY_FIRST_FAILURE))
return suite
| kret0s/gnuhealth-live | tryton/server/trytond-3.8.3/trytond/modules/account_stock_continental/tests/test_account_stock_continental.py | Python | gpl-3.0 | 890 |
# Generated by Django 2.2.19 on 2021-04-24 14:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('conference', '0028_changes_to_conferencetag_and_conferencetaggeditem'),
]
operations = [
migrations.AddField(
model_name='talk',
name='availability',
field=models.TextField(blank=True, default='', help_text='<p>Please enter your time availability.</p>', verbose_name='Timezone availability'),
),
]
| EuroPython/epcon | conference/migrations/0029_talk_availability.py | Python | bsd-2-clause | 527 |
"""
The radical of n, rad(n), is the product of distinct prime factors of n. For
example, 504 = 2^3 x 3^2 x 7, so rad(504) = 2 x 3 x 7 = 42.
If we calculate rad(n) for 1 n 10, then sort them on rad(n), and sorting on n
Let E(k) be the kth element in the sorted n column; for example, E(4) = 8 and
E(6) = 9.
If rad(n) is sorted for 1 n 100000, find E(10000).
"""
from number_theory import rad, decomp_sieve
from itertools import count
N = 100000
pds = decomp_sieve(N)
rads = [(rad(n), i) for n, i in zip(pds, count(0))]
rads.sort()
print rads[10000][1]
| peterstace/project-euler | OLD_PY_CODE/project_euler_old_old/124/124.py | Python | unlicense | 561 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
import logging
import docopt
import psd_tools.reader
import psd_tools.decoder
from psd_tools import PSDImage
from psd_tools.user_api.layers import group_layers
from psd_tools.debug import pprint
from psd_tools.version import __version__
logger = logging.getLogger('psd_tools')
logger.addHandler(logging.StreamHandler())
def main():
"""
psd-tools.py
Usage:
psd-tools.py convert <psd_filename> <out_filename> [options]
psd-tools.py export_layer <psd_filename> <layer_index> <out_filename> [options]
psd-tools.py debug <filename> [options]
psd-tools.py -h | --help
psd-tools.py --version
Options:
-v --verbose Be more verbose.
--encoding <encoding> Text encoding [default: latin1].
"""
args = docopt.docopt(main.__doc__, version=__version__)
if args['--verbose']:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
encoding = args['--encoding']
if args['convert']:
psd = PSDImage.load(args['<psd_filename>'], encoding=encoding)
im = psd.as_PIL()
im.save(args['<out_filename>'])
elif args['export_layer']:
psd = PSDImage.load(args['<psd_filename>'], encoding=encoding)
index = int(args['<layer_index>'])
im = psd.layers[index].as_PIL()
im.save(args['<out_filename>'])
print(psd.layers)
psd.as_PIL()
elif args['debug']:
with open(args['<filename>'], "rb") as f:
decoded = psd_tools.decoder.parse(
psd_tools.reader.parse(f, encoding)
)
print("\nHeader\n------")
print(decoded.header)
print("\nDecoded data\n-----------")
pprint(decoded)
print("\nLayers\n------")
pprint(group_layers(decoded))
| EvgenKo423/psd-tools | src/psd_tools/cli.py | Python | mit | 1,922 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <adam.stevko@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module metadata consumed by the Ansible plugin loader / ansible-doc.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Module documentation rendered by ansible-doc. Fixes: the option was
# documented as "remove_ip" although the argument spec declares
# "remote_ip"; "Specifiies" typo; missing space in "suffixes (K"; the
# state description was copy-pasted from an IP-address module.
DOCUMENTATION = '''
---
module: flowadm
short_description: Manage bandwidth resource control and priority for protocols, services and zones on Solaris/illumos systems
description:
    - Create/modify/remove networking bandwidth and associated resources for a type of traffic on a particular link.
version_added: "2.2"
author: Adam Števko (@xen0l)
options:
    name:
        description: >
            - A flow is defined as a set of attributes based on Layer 3 and Layer 4
            headers, which can be used to identify a protocol, service, or a zone.
        required: true
        aliases: [ 'flow' ]
    link:
        description:
            - Specifies a link to configure flow on.
        required: false
    local_ip:
        description:
            - Identifies a network flow by the local IP address.
        required: false
    remote_ip:
        description:
            - Identifies a network flow by the remote IP address.
        required: false
    transport:
        description: >
            - Specifies a Layer 4 protocol to be used. It is typically used in combination with I(local_port) to
            identify the service that needs special attention.
        required: false
    local_port:
        description:
            - Identifies a service specified by the local port.
        required: false
    dsfield:
        description: >
            - Identifies the 8-bit differentiated services field (as defined in
            RFC 2474). The optional dsfield_mask is used to state the bits of interest in
            the differentiated services field when comparing with the dsfield
            value. Both values must be in hexadecimal.
        required: false
    maxbw:
        description: >
            - Sets the full duplex bandwidth for the flow. The bandwidth is
            specified as an integer with one of the scale suffixes (K, M, or G
            for Kbps, Mbps, and Gbps). If no units are specified, the input
            value will be read as Mbps.
        required: false
    priority:
        description:
            - Sets the relative priority for the flow.
        required: false
        default: 'medium'
        choices: [ 'low', 'medium', 'high' ]
    temporary:
        description:
            - Specifies that the configured flow is temporary. Temporary
              flows do not persist across reboots.
        required: false
        default: false
        choices: [ "true", "false" ]
    state:
        description:
            - Create, delete or reset the network flow.
        required: false
        default: present
        choices: [ 'absent', 'present', 'resetted' ]
'''
EXAMPLES = '''
# Limit SSH traffic to 100M via vnic0 interface
- flowadm:
link: vnic0
flow: ssh_out
transport: tcp
local_port: 22
maxbw: 100M
state: present
# Reset flow properties
- flowadm:
name: dns
state: resetted
# Configure policy for EF PHB (DSCP value of 101110 from RFC 2598) with a bandwidth of 500 Mbps and a high priority.
- flowadm:
link: bge0
dsfield: '0x2e:0xfc'
maxbw: 500M
priority: high
flow: efphb-flow
state: present
'''
# Return-value documentation. Fixes the miscased keys "local_Ip" /
# "remote_Ip": the code below registers the results under
# result['local_ip'] and result['remote_ip'].
RETURN = '''
name:
    description: flow name
    returned: always
    type: string
    sample: "http_drop"
link:
    description: flow's link
    returned: if link is defined
    type: string
    sample: "vnic0"
state:
    description: state of the target
    returned: always
    type: string
    sample: "present"
temporary:
    description: flow's persistence
    returned: always
    type: boolean
    sample: "True"
priority:
    description: flow's priority
    returned: if priority is defined
    type: string
    sample: "low"
transport:
    description: flow's transport
    returned: if transport is defined
    type: string
    sample: "tcp"
maxbw:
    description: flow's maximum bandwidth
    returned: if maxbw is defined
    type: string
    sample: "100M"
local_ip:
    description: flow's local IP address
    returned: if local_ip is defined
    type: string
    sample: "10.0.0.42"
local_port:
    description: flow's local port
    returned: if local_port is defined
    type: int
    sample: 1337
remote_ip:
    description: flow's remote IP address
    returned: if remote_ip is defined
    type: string
    sample: "10.0.0.42"
dsfield:
    description: flow's differentiated services value
    returned: if dsfield is defined
    type: string
    sample: "0x2e:0xfc"
'''
import socket
from ansible.module_utils.basic import AnsibleModule
# Values accepted by flowadm for flow attributes and properties.
SUPPORTED_TRANSPORTS = ['tcp', 'udp', 'sctp', 'icmp', 'icmpv6']
SUPPORTED_PRIORITIES = ['low', 'medium', 'high']
SUPPORTED_ATTRIBUTES = ['local_ip', 'remote_ip', 'transport', 'local_port', 'dsfield']
SUPPORTED_PROPERTIES = ['maxbw', 'priority']
# Backward-compatible alias: the original name carried a typo
# ("SUPPORTPED"); keep it so any external importer still works.
SUPPORTPED_PROPERTIES = SUPPORTED_PROPERTIES
class Flow(object):
    """Wrapper around the illumos flowadm(1M) command.

    Builds and runs flowadm sub-commands for creating, deleting,
    updating, resetting and inspecting network flows based on the
    Ansible module parameters.
    """

    def __init__(self, module):
        self.module = module

        self.name = module.params['name']
        self.link = module.params['link']
        self.local_ip = module.params['local_ip']
        self.remote_ip = module.params['remote_ip']
        self.transport = module.params['transport']
        self.local_port = module.params['local_port']
        self.dsfield = module.params['dsfield']
        self.maxbw = module.params['maxbw']
        self.priority = module.params['priority']
        self.temporary = module.params['temporary']
        self.state = module.params['state']

        # Per-property dirty flags, filled in by flow_needs_udpating()
        # and consumed by update_flow().
        self._needs_updating = {
            'maxbw': False,
            'priority': False,
        }

    @classmethod
    def is_valid_port(cls, port):
        """Return True if *port* is a valid TCP/UDP port number (1-65535)."""
        return 1 <= int(port) <= 65535

    @classmethod
    def is_valid_address(cls, ip):
        """Return True if *ip* is a valid IPv4/IPv6 address, optionally
        followed by a '/prefix' length.

        Bug fix: the original referenced the prefix variable even when no
        '/' was present (NameError) and compared it as a string instead of
        an integer.
        """
        netmask = None
        if ip.count('/') == 1:
            ip_address, netmask = ip.split('/')
        else:
            ip_address = ip

        if netmask is not None:
            try:
                netmask = int(netmask)
            except ValueError:
                return False

        if len(ip_address.split('.')) == 4:
            # Dotted quad: validate as IPv4.
            try:
                socket.inet_pton(socket.AF_INET, ip_address)
            except socket.error:
                return False
            if netmask is not None and not 0 <= netmask <= 32:
                return False
        else:
            # Anything else: validate as IPv6.
            try:
                socket.inet_pton(socket.AF_INET6, ip_address)
            except socket.error:
                return False
            if netmask is not None and not 0 <= netmask <= 128:
                return False

        return True

    @classmethod
    def is_hex(cls, number):
        """Return True if *number* parses as a hexadecimal value."""
        try:
            int(number, 16)
        except ValueError:
            return False
        return True

    @classmethod
    def is_valid_dsfield(cls, dsfield):
        """Validate a 'dsval[:dsmask]' differentiated-services field.

        Both components must be hexadecimal values in [0x01, 0xff].

        Bug fix: the original branches were swapped — the mask was
        dropped when present and the no-mask form raised ValueError on
        tuple unpacking.
        """
        dsmask = None
        if dsfield.count(':') == 1:
            dsval, dsmask = dsfield.split(':')
        else:
            dsval = dsfield

        if not cls.is_hex(dsval) or not 0x01 <= int(dsval, 16) <= 0xff:
            return False
        if dsmask is not None and (
                not cls.is_hex(dsmask) or not 0x01 <= int(dsmask, 16) <= 0xff):
            return False
        return True

    def flow_exists(self):
        """Return True if a flow with this name is known to flowadm."""
        cmd = [self.module.get_bin_path('flowadm')]

        cmd.append('show-flow')
        cmd.append(self.name)

        (rc, _, _) = self.module.run_command(cmd)

        return rc == 0

    def delete_flow(self):
        """Run 'flowadm remove-flow' for this flow."""
        cmd = [self.module.get_bin_path('flowadm')]

        cmd.append('remove-flow')
        if self.temporary:
            cmd.append('-t')
        cmd.append(self.name)

        return self.module.run_command(cmd)

    def create_flow(self):
        """Run 'flowadm add-flow' with the configured attributes/properties."""
        cmd = [self.module.get_bin_path('flowadm')]

        cmd.append('add-flow')
        cmd.append('-l')
        cmd.append(self.link)

        if self.local_ip:
            cmd.append('-a')
            cmd.append('local_ip=' + self.local_ip)

        if self.remote_ip:
            cmd.append('-a')
            cmd.append('remote_ip=' + self.remote_ip)

        if self.transport:
            cmd.append('-a')
            cmd.append('transport=' + self.transport)

        if self.local_port:
            cmd.append('-a')
            cmd.append('local_port=' + self.local_port)

        if self.dsfield:
            cmd.append('-a')
            cmd.append('dsfield=' + self.dsfield)

        if self.maxbw:
            cmd.append('-p')
            cmd.append('maxbw=' + self.maxbw)

        if self.priority:
            cmd.append('-p')
            cmd.append('priority=' + self.priority)

        if self.temporary:
            cmd.append('-t')
        cmd.append(self.name)

        return self.module.run_command(cmd)

    def reset_flow(self):
        """Run 'flowadm reset-flowprop' to restore default properties.

        Bug fix: main() calls this for state=resetted but the method was
        missing from the class entirely.
        """
        cmd = [self.module.get_bin_path('flowadm')]

        cmd.append('reset-flowprop')
        if self.temporary:
            cmd.append('-t')
        cmd.append(self.name)

        return self.module.run_command(cmd)

    def _query_flow_props(self):
        """Return (rc, out, err) of a machine-parsable property listing."""
        cmd = [self.module.get_bin_path('flowadm')]

        cmd.append('show-flowprop')
        cmd.append('-c')
        cmd.append('-o')
        cmd.append('property,possible')
        cmd.append(self.name)

        return self.module.run_command(cmd)

    def flow_needs_udpating(self):
        """Return True if maxbw/priority differ from the live flow.

        NOTE: the misspelled name ("udpating") is kept on purpose — it is
        part of the public interface used by main().
        """
        (rc, out, err) = self._query_flow_props()

        NEEDS_UPDATING = False

        if rc == 0:
            properties = (line.split(':') for line in out.rstrip().split('\n'))
            for prop, value in properties:
                if prop == 'maxbw' and self.maxbw != value:
                    self._needs_updating.update({prop: True})
                    NEEDS_UPDATING = True
                elif prop == 'priority' and self.priority != value:
                    self._needs_updating.update({prop: True})
                    NEEDS_UPDATING = True
            return NEEDS_UPDATING
        else:
            self.module.fail_json(msg='Error while checking flow properties: %s' % err,
                                  stderr=err,
                                  rc=rc)

    def update_flow(self):
        """Run 'flowadm set-flowprop' for the properties flagged dirty."""
        cmd = [self.module.get_bin_path('flowadm')]

        cmd.append('set-flowprop')

        if self.maxbw and self._needs_updating['maxbw']:
            cmd.append('-p')
            cmd.append('maxbw=' + self.maxbw)

        if self.priority and self._needs_updating['priority']:
            cmd.append('-p')
            cmd.append('priority=' + self.priority)

        if self.temporary:
            cmd.append('-t')
        cmd.append(self.name)

        return self.module.run_command(cmd)
def main():
    """Module entry point: create, update, delete or reset a flow.

    Fixes relative to the original: (1) the update path now honours
    check mode (it previously ran set-flowprop even with --check);
    (2) invalid local/remote IP addresses now fail explicitly instead of
    being silently dropped, matching the local_port/dsfield handling.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, aliases=['flow']),
            link=dict(required=False),
            local_ip=dict(required=False),
            remote_ip=dict(required=False),
            transport=dict(required=False, choices=SUPPORTED_TRANSPORTS),
            local_port=dict(required=False),
            dsfield=dict(required=False),
            maxbw=dict(required=False),
            priority=dict(required=False,
                          default='medium',
                          choices=SUPPORTED_PRIORITIES),
            temporary=dict(default=False, type='bool'),
            state=dict(required=False,
                       default='present',
                       choices=['absent', 'present', 'resetted']),
        ),
        mutually_exclusive=[
            ('local_ip', 'remote_ip'),
            ('local_ip', 'transport'),
            ('local_ip', 'local_port'),
            ('local_ip', 'dsfield'),
            ('remote_ip', 'transport'),
            ('remote_ip', 'local_port'),
            ('remote_ip', 'dsfield'),
            ('transport', 'dsfield'),
            ('local_port', 'dsfield'),
        ],
        supports_check_mode=True
    )

    flow = Flow(module)

    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = flow.name
    result['state'] = flow.state
    result['temporary'] = flow.temporary

    if flow.link:
        result['link'] = flow.link

    if flow.maxbw:
        result['maxbw'] = flow.maxbw

    if flow.priority:
        result['priority'] = flow.priority

    # Validate user input before touching the system; bad addresses are a
    # hard failure, consistent with the port/dsfield checks below.
    if flow.local_ip:
        if flow.is_valid_address(flow.local_ip):
            result['local_ip'] = flow.local_ip
        else:
            module.fail_json(msg='Invalid local_ip: %s' % flow.local_ip,
                             rc=1)

    if flow.remote_ip:
        if flow.is_valid_address(flow.remote_ip):
            result['remote_ip'] = flow.remote_ip
        else:
            module.fail_json(msg='Invalid remote_ip: %s' % flow.remote_ip,
                             rc=1)

    if flow.transport:
        result['transport'] = flow.transport

    if flow.local_port:
        if flow.is_valid_port(flow.local_port):
            result['local_port'] = flow.local_port
        else:
            module.fail_json(msg='Invalid port: %s' % flow.local_port,
                             rc=1)

    if flow.dsfield:
        if flow.is_valid_dsfield(flow.dsfield):
            result['dsfield'] = flow.dsfield
        else:
            module.fail_json(msg='Invalid dsfield: %s' % flow.dsfield,
                             rc=1)

    if flow.state == 'absent':
        if flow.flow_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = flow.delete_flow()
            if rc != 0:
                module.fail_json(msg='Error while deleting flow: "%s"' % err,
                                 name=flow.name,
                                 stderr=err,
                                 rc=rc)
    elif flow.state == 'present':
        if not flow.flow_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = flow.create_flow()
            if rc != 0:
                module.fail_json(msg='Error while creating flow: "%s"' % err,
                                 name=flow.name,
                                 stderr=err,
                                 rc=rc)
        else:
            if flow.flow_needs_udpating():
                # Honour check mode here as well (was missing before).
                if module.check_mode:
                    module.exit_json(changed=True)
                (rc, out, err) = flow.update_flow()
                if rc != 0:
                    module.fail_json(msg='Error while updating flow: "%s"' % err,
                                     name=flow.name,
                                     stderr=err,
                                     rc=rc)
    elif flow.state == 'resetted':
        if flow.flow_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = flow.reset_flow()
            if rc != 0:
                module.fail_json(msg='Error while resetting flow: "%s"' % err,
                                 name=flow.name,
                                 stderr=err,
                                 rc=rc)

    # rc stays None when no command was run, i.e. nothing changed.
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True

    if out:
        result['stdout'] = out

    if err:
        result['stderr'] = err

    module.exit_json(**result)


if __name__ == '__main__':
    main()
| tszym/ansible | lib/ansible/modules/network/illumos/flowadm.py | Python | gpl-3.0 | 14,817 |
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import Float, Range, Property
from traits.trait_errors import TraitError
from traitsui.api import View, Item, RangeEditor, VGroup
# ============= local library imports ==========================
from pychron.spectrometer.base_source import BaseSource
# ============= standard library imports ========================
class ThermoSource(BaseSource):
    """Ion source control for Thermo spectrometers.

    Translates trait assignments (trap current/voltage, beam symmetry,
    extraction lens, high voltage) into the instrument's ASCII command
    protocol via ``self.ask`` (inherited from ``BaseSource`` — confirm),
    and parses readback replies into the shadow ``_...`` traits that
    back the public ``Property`` traits below.
    """

    # Public traits are Properties backed by private shadow values so that
    # assignment can send the corresponding instrument command and only
    # commit the value on an acknowledged reply.
    trap_voltage = Property(depends_on="_trap_voltage")
    _trap_voltage = Float
    trap_current = Property(depends_on="_trap_current")
    _trap_current = Float

    z_symmetry = Property(depends_on="_z_symmetry")
    y_symmetry = Property(depends_on="_y_symmetry")
    extraction_lens = Property(Range(0, 100.0), depends_on="_extraction_lens")

    # Last emission readback (set by read_emission()).
    emission = Float

    _y_symmetry = Float  # Range(0.0, 100.)
    _z_symmetry = Float  # Range(0.0, 100.)

    # Slider bounds used by the RangeEditors in the default view.
    y_symmetry_low = Float(-100.0)
    y_symmetry_high = Float(100.0)
    z_symmetry_low = Float(-100.0)
    z_symmetry_high = Float(100.0)

    _extraction_lens = Float  # Range(0.0, 100.)

    def set_hv(self, v):
        """Send ``SetHV <v>``; return True on an 'OK' reply."""
        return self._set_value("SetHV", v)

    def read_emission(self):
        """Query the source emission readback and cache it in ``emission``."""
        return self._read_value("GetParameter Source Current Readback", "emission")

    def read_trap_current(self):
        """Query the trap current readback and cache it in ``_trap_current``."""
        return self._read_value("GetParameter Trap Current Readback", "_trap_current")

    def read_y_symmetry(self):
        """Query the Y-symmetry setpoint and cache it in ``_y_symmetry``."""
        return self._read_value("GetParameter Y-Symmetry Set", "_y_symmetry")

    def read_z_symmetry(self):
        """Query the Z-symmetry setpoint and cache it in ``_z_symmetry``."""
        return self._read_value("GetParameter Z-Symmetry Set", "_z_symmetry")

    def read_trap_voltage(self):
        """Query the trap voltage readback and cache it in ``_trap_voltage``."""
        return self._read_value("GetParameter Trap Voltage Readback", "_trap_voltage")

    def read_hv(self):
        """Query the high voltage and cache it in ``current_hv``.

        NOTE(review): ``current_hv`` is presumably declared on BaseSource —
        confirm.
        """
        return self._read_value("GetHighVoltage", "current_hv")

    def _set_value(self, name, v):
        """Send ``<name> <v>``; return True when the reply is 'OK'.

        Returns None (falsy) when there is no reply or it is not 'OK'.
        """
        r = self.ask("{} {}".format(name, v))
        if r is not None:
            if r.lower().strip() == "ok":
                return True

    def _read_value(self, name, value):
        """Send query ``name``; round the reply to 3 decimals and store it
        in the trait named ``value``. Returns 0 when the reply cannot be
        parsed as a float or rejected by the trait.
        """
        r = self.ask(name, verbose=True)
        try:
            r = round(float(r), 3)
            setattr(self, value, r)
            return getattr(self, value)
        except (ValueError, TypeError, TraitError):
            return 0

    def sync_parameters(self):
        """Refresh the cached symmetry, trap current and HV values from
        the instrument."""
        self.read_y_symmetry()
        self.read_z_symmetry()
        self.read_trap_current()
        self.read_hv()

    def _get_default_group(self, label=None):
        """Build the default TraitsUI group for this source; ``label``
        optionally titles the group."""
        g = VGroup(
            Item("nominal_hv", format_str="%0.4f"),
            Item("current_hv", format_str="%0.4f", style="readonly"),
            Item("trap_current"),
            Item("trap_voltage"),
            Item(
                "y_symmetry",
                editor=RangeEditor(
                    low_name="y_symmetry_low",
                    high_name="y_symmetry_high",
                    mode="slider",
                ),
            ),
            Item(
                "z_symmetry",
                editor=RangeEditor(
                    low_name="z_symmetry_low",
                    high_name="z_symmetry_high",
                    mode="slider",
                ),
            ),
            Item("extraction_lens"),
        )
        if label:
            g.label = label
        return g

    def traits_view(self):
        """Default TraitsUI view."""
        v = View(self._get_default_group())
        return v

    # ===============================================================================
    # property get/set
    # ===============================================================================
    def _get_trap_voltage(self):
        return self._trap_voltage

    def _get_trap_current(self):
        return self._trap_current

    def _get_y_symmetry(self):
        return self._y_symmetry

    def _get_z_symmetry(self):
        return self._z_symmetry

    def _get_extraction_lens(self):
        return self._extraction_lens

    # Setters only commit the shadow value when the instrument acknowledges
    # the command, so the UI reflects the actual hardware state.
    def _set_trap_voltage(self, v):
        if self._set_value("SetParameter", "Trap Voltage Set,{}".format(v)):
            self._trap_voltage = v

    def _set_trap_current(self, v):
        if self._set_value("SetParameter", "Trap Current Set,{}".format(v)):
            self._trap_current = v

    def _set_y_symmetry(self, v):
        if self._set_value("SetYSymmetry", v):
            self._y_symmetry = v

    def _set_z_symmetry(self, v):
        if self._set_value("SetZSymmetry", v):
            self._z_symmetry = v

    def _set_extraction_lens(self, v):
        if self._set_value("SetExtractionLens", v):
            self._extraction_lens = v
| USGSDenverPychron/pychron | pychron/spectrometer/thermo/source/base.py | Python | apache-2.0 | 5,373 |
# ==========================================================================
# This module performs unit tests for the GammaLib CTA module.
#
# Copyright (C) 2012-2021 Juergen Knoedlseder
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ==========================================================================
import os
import gammalib
import math
import test_support
# ================================== #
# Test class for GammaLib CTA module #
# ================================== #
class Test(gammalib.GPythonTestSuite):
"""
Test class for GammaLib CTA module
"""
# Constructor
    def __init__(self):
        """
        Constructor

        Reads the CTA test data directory from the TEST_CTA_DATA
        environment variable (raises KeyError when unset) and derives
        the calibration database path relative to it.
        """
        # Call base class constructor
        gammalib.GPythonTestSuite.__init__(self)

        # Set test directories
        self._data = os.environ['TEST_CTA_DATA']
        self._caldb = self._data + '/../caldb'

        # Return
        return
# Setup GCTAEventList container
def _setup_eventlist(self):
"""
Setup GCTAEventList container
Returns
-------
list : `~gammalib.GCTAEventList`
GCTAEventList container
"""
# Setup event list container
list = gammalib.GCTAEventList()
for i in range(10):
dir = gammalib.GCTAInstDir(gammalib.GSkyDir(),float(i),float(i))
energy = gammalib.GEnergy(float(i),'MeV')
time = gammalib.GTime(float(i),'sec')
atom = gammalib.GCTAEventAtom(dir, energy, time)
list.append(atom)
# Return event list container
return list
# Setup GCTAEventCube
def _setup_eventcube(self):
"""
Setup GCTAEventCube
Returns
-------
cube : `~gammalib.GCTAEventCube`
GCTAEventCube
"""
# Setup event cube
map = gammalib.GSkyMap('CAR','CEL',0.0,0.0,1.0,1.0,5,5,2)
ebds = gammalib.GEbounds(2, gammalib.GEnergy(1.0, 'TeV'),
gammalib.GEnergy(10.0, 'TeV'))
gti = gammalib.GGti(gammalib.GTime(0.0,'sec'),
gammalib.GTime(1.0,'sec'))
cube = gammalib.GCTAEventCube(map, ebds, gti)
counts = 0.0
for bin in cube:
counts += 1.0
bin.counts(1.0)
# Return event cube
return cube
# Test GCTAEventList class access operators
def _test_eventlist_access(self):
"""
Test GCTAEventList class observation access
"""
# Setup event list container
list = self._setup_eventlist()
# Perform event list access tests
test_support.energy_container_access_index(self, list)
# Return
return
# Test GCTAEventList class slicing
def _test_eventlist_slicing(self):
"""
Test GCTAEventList class slicing
"""
# Setup event list container
list = self._setup_eventlist()
# Perform slicing tests
test_support.energy_container_slicing(self, list)
# Return
return
# Test effective area response
def _test_aeff(self):
"""
Test GCTAAeff classes
"""
# Test GCTAAeff2D file constructor
filename = self._caldb + '/prod1_gauss.fits'
aeff = gammalib.GCTAAeff2D(filename)
# Test Aeff values
self.test_value(aeff(0.0, 0.0), 5535774176.75, 0.1,
'Test reference effective area value')
self.test_value(aeff(1.0, 0.0), 20732069462.7, 0.1,
'Test reference effective area value')
self.test_value(aeff(0.0, 0.01745), 5682897797.76, 0.1,
'Test reference effective area value')
self.test_value(aeff(1.0, 0.01745), 18446656815.1, 0.1,
'Test reference effective area value')
# Test that Aeff values outside boundaries are zero
self.test_value(aeff(-1.80001, 0.0), 0.0, 1.0e-6,
'Test that effective area is zero for energy below'
' minimum energy')
self.test_value(aeff(+2.20001, 0.0), 0.0, 1.0e-6,
'Test that effective area is zero for energy above'
' maximum energy')
self.test_value(aeff(0.0, -0.00001), 0.0, 1.0e-6,
'Test that effective area is zero for offset angle'
' below minimum offset angle')
self.test_value(aeff(0.0, 0.13963), 0.0, 1.0e-6,
'Test that effective area is zero for offset angle'
' above maximum offset angle')
# Test GCTAAeffPerfTable file constructor
filename = self._caldb + '/cta_dummy_irf.dat'
aeff = gammalib.GCTAAeffPerfTable(filename)
# Test Aeff values
self.test_value(aeff(0.0, 0.0), 2738898000.0, 0.1)
self.test_value(aeff(1.0, 0.0), 16742420500.0, 0.1)
self.test_value(aeff(0.0, 0.01745), 2590995083.29, 0.1)
self.test_value(aeff(1.0, 0.01745), 15838314971.2, 0.1)
# Test GCTAAeffArf file constructor
filename = self._caldb + '/dc1/arf.fits'
aeff = gammalib.GCTAAeffArf(filename)
# Test Aeff values
self.test_value(aeff(0.0, 0.0), 1607246236.98, 0.1)
self.test_value(aeff(1.0, 0.0), 4582282342.98, 0.1)
self.test_value(aeff(0.0, 0.01745), 1607246236.98, 0.1)
self.test_value(aeff(1.0, 0.01745), 4582282342.98, 0.1)
# Return
return
# Test point spread function response
def _test_psf(self):
"""
Test GCTAPsf classes
"""
# Test GCTAPsf2D file constructor
filename = self._caldb + '/prod1_gauss.fits'
psf = gammalib.GCTAPsf2D(filename)
# Test PSF values
self.test_value(psf(0.0, 0.0, 0.0), 163782.469465, 1.0e-6)
self.test_value(psf(0.001, 0.0, 0.0), 97904.9307797, 1.0e-6)
self.test_value(psf(0.0, 1.0, 0.0), 616076.98558, 1.0e-6)
self.test_value(psf(0.001, 1.0, 0.0), 88932.681708, 1.0e-6)
self.test_value(psf(0.0, 1.0, 0.01745), 433247.309504, 1.0e-6)
self.test_value(psf(0.001, 1.0, 0.01745), 111075.0692681, 1.0e-6)
# Test GCTAPsfKing file constructor
filename = self._caldb + '/prod1_king.fits'
psf = gammalib.GCTAPsfKing(filename)
# Test PSF values
self.test_value(psf(0.0, 0.0, 0.0), 213616.312600672, 1.0e-6)
self.test_value(psf(0.001, 0.0, 0.0), 90918.3030269623, 1.0e-6)
self.test_value(psf(0.0, 1.0, 0.0), 1126804.99931516, 1.0e-5)
self.test_value(psf(0.001, 1.0, 0.0), 54873.6646449112, 1.0e-6)
self.test_value(psf(0.0, 1.0, 0.01745), 660972.636049452, 1.0e-6)
self.test_value(psf(0.001, 1.0, 0.01745), 80272.4048345619, 1.0e-6)
# Test GCTAPsfPerfTable file constructor
filename = self._caldb + '/cta_dummy_irf.dat'
psf = gammalib.GCTAPsfPerfTable(filename)
# Test PSF values
self.test_value(psf(0.0, 0.0, 0.0), 537853.354917, 1.0e-6)
self.test_value(psf(0.001, 0.0, 0.0), 99270.360144, 1.0e-6)
self.test_value(psf(0.0, 1.0, 0.0), 1292604.7473727, 1.0e-6)
self.test_value(psf(0.001, 1.0, 0.0), 22272.4258111, 1.0e-6)
self.test_value(psf(0.0, 1.0, 0.01745), 1292604.7473727, 1.0e-6)
self.test_value(psf(0.001, 1.0, 0.01745), 22272.4258111, 1.0e-6)
# Test GCTAPsfVector file constructor
filename = self._caldb + '/dc1/psf_magic.fits'
psf = gammalib.GCTAPsfVector(filename)
# Print
self.test_value(psf(0.0, -1.0, 0.0), 42263.9572394, 1.0e-6)
self.test_value(psf(0.001, -1.0, 0.0), 37008.8652966, 1.0e-6)
self.test_value(psf(0.0, 0.0, 0.0), 208989.164294, 1.0e-6)
self.test_value(psf(0.001, 0.0, 0.0), 108388.031915, 1.0e-6)
self.test_value(psf(0.0, 0.0, 0.01745), 208989.164294, 1.0e-6)
self.test_value(psf(0.001, 0.0, 0.01745), 108388.031915, 1.0e-6)
# Return
return
# Test energy dispersion
    def _test_edisp(self):
        """
        Test GCTAEdisp classes

        Probes the RMF-based and performance-table energy dispersions at
        fixed true/reconstructed energy pairs.
        """
        # Set some energies (names encode the offset from 1 resp. 10 TeV)
        eng0 = gammalib.GEnergy(1.0, 'TeV')
        eng01 = gammalib.GEnergy(1.2589254, 'TeV')
        eng001 = gammalib.GEnergy(1.023293, 'TeV')
        eng0001 = gammalib.GEnergy(1.0023052, 'TeV')
        eng1001 = gammalib.GEnergy(10.023052, 'TeV')
        eng1 = gammalib.GEnergy(10.0, 'TeV')
        eng30 = gammalib.GEnergy(30.0, 'TeV')

        # Test GCTAEdispRmf file constructor
        filename = self._caldb + '/dc1/rmf.fits'
        edisp = gammalib.GCTAEdispRmf(filename)

        # Test energy dispersion values: widely separated energies must
        # give zero dispersion probability
        self.test_value(edisp(eng30, eng0), 0.0, 1.0e-9)
        self.test_value(edisp(eng0, eng30), 0.0, 1.0e-9)

        # Test GCTAEdispPerfTable file constructor
        filename = self._caldb + '/cta_dummy_irf.dat'
        edisp = gammalib.GCTAEdispPerfTable(filename)

        # Test energy dispersion values (probability decreases with the
        # distance between true and reconstructed energy)
        self.test_value(edisp(eng0, eng0), 4.3386871e-06, 1.0e-6)
        self.test_value(edisp(eng0001, eng0), 4.3273514e-06, 1.0e-6)
        self.test_value(edisp(eng001, eng0), 4.1090489e-06, 1.0e-6)
        self.test_value(edisp(eng01, eng0), 1.4984969e-07, 1.0e-6)
        self.test_value(edisp(eng1, eng1), 7.8454726e-07, 1.0e-6)
        self.test_value(edisp(eng1001, eng1, 0.0), 7.8194077e-07, 1.0e-6)

        # Test GCTAResponseIrf file constructor
        # NOTE(review): `rsp` is only constructed, apparently as a smoke
        # test for the nedisp checks disabled below — confirm intent.
        db = gammalib.GCaldb(self._caldb)
        irf = 'cta_dummy_irf'
        rsp = gammalib.GCTAResponseIrf(irf, db)

        # Test nedisp computations
        # NOTE(review): these checks are disabled; no reason is recorded.
        #dir  = gammalib.GSkyDir()
        #pnt  = gammalib.GCTAPointing()
        #time = gammalib.GTime()
        #self.test_value(rsp.nedisp(dir, GEnergy(3.7, "TeV"), time, pnt,
        #                           GEbounds(GEnergy(0.1, "TeV"),
        #                                    GEnergy(10.0, "TeV"))),
        #                1.0, 0.005)
        #self.test_value(rsp.nedisp(dir, GEnergy(3.7, "TeV"), time, pnt,
        #                           GEbounds(GEnergy(2.72345, "TeV"),
        #                                    GEnergy(5.026615, "TeV"))),
        #                1.0, 0.005)
        #self.test_value(rsp.nedisp(dir, GEnergy(3.7, "TeV"), time, pnt,
        #                           GEbounds(GEnergy(3.7, "TeV"),
        #                                    GEnergy(10.0, "TeV"))),
        #                0.5, 0.005)
        # NOTE(review): unlike the sibling tests this method has no
        # trailing `return` statement.
# Test response
def _test_response(self):
"""
Test response classes
"""
# Load 1DC CTA observation (ARF, PSF, RMF)
filename = self._data + '/irf_1dc.xml'
obs = gammalib.GObservations(filename)
# Return
return
# Test On/Off analysis
    def _test_onoff(self):
        """
        Test On/Off analysis

        Builds On/Off observations around the Crab position, fits a
        power-law plus background model and checks the fitted parameters
        against reference values; finally exercises PHA/ARF/RMF and
        observation saving.
        """
        # Load model container
        models = gammalib.GModels(self._data + '/crab_irf.xml')

        # Create On region (0.2 deg circle at the Crab position)
        ondir = gammalib.GSkyDir()
        ondir.radec_deg(83.6331, 22.0145)
        on = gammalib.GSkyRegions()
        on.append(gammalib.GSkyRegionCircle(ondir, 0.2))

        # Create Off region (0.5 deg circle offset in declination)
        offdir = gammalib.GSkyDir()
        offdir.radec_deg(83.6331, 23.5145)
        off = gammalib.GSkyRegions()
        off.append(gammalib.GSkyRegionCircle(offdir, 0.5))

        # Set energy binning (finer for true than for reconstructed energy)
        etrue = gammalib.GEbounds(40, gammalib.GEnergy(0.1, 'TeV'),
                                  gammalib.GEnergy(10.0, 'TeV'))
        ereco = gammalib.GEbounds(20, gammalib.GEnergy(0.1, 'TeV'),
                                  gammalib.GEnergy(10.0, 'TeV'))

        # Create On/Off observations from CTA observations
        filename = self._data + '/irf_unbinned.xml'
        inobs = gammalib.GObservations(filename)
        outobs = gammalib.GObservations()
        for run in inobs:
            onoff = gammalib.GCTAOnOffObservation(run, models, 'Crab',
                                                  etrue, ereco, on, off)
            outobs.append(onoff)

        # Load On/Off models and attach them to the observations
        models = gammalib.GModels(self._data + '/onoff_model.xml')
        outobs.models(models)

        # Perform maximum likelihood fit and compute parameter errors
        lm = gammalib.GOptimizerLM()
        outobs.optimize(lm)
        outobs.errors(lm)

        # Test On/Off model fitting results against reference values
        sky = outobs.models()['Crab']
        bgd = outobs.models()['Background']
        self.test_value(sky['Prefactor'].value(), 6.456877e-16, 1.0e-18,
                        'Check sky model prefactor value')
        self.test_value(sky['Prefactor'].error(), 2.176260e-17, 1.0e-20,
                        'Check sky model prefactor error')
        self.test_value(sky['Index'].value(), -2.575639, 1.0e-4,
                        'Check sky model index value')
        self.test_value(sky['Index'].error(), 0.030702, 1.0e-4,
                        'Check sky model index error')
        self.test_value(bgd['Prefactor'].value(), 1.182291, 0.01,
                        'Check background model prefactor value')
        self.test_value(bgd['Prefactor'].error(), 0.152625, 0.01,
                        'Check background model prefactor error')
        self.test_value(bgd['Index'].value(), 0.520937, 0.01,
                        'Check background model index value')
        self.test_value(bgd['Index'].error(), 0.086309, 0.01,
                        'Check background model index error')

        # Save PHA, ARF and RMFs (exercises the save code paths)
        for run in outobs:
            run.on_spec().save('test_cta_onoff_pha_on.fits', True)
            run.off_spec().save('test_cta_onoff_pha_off.fits', True)
            run.arf().save('test_cta_onoff_arf.fits', True)
            run.rmf().save('test_cta_onoff_rmf.fits', True)

        # Save On/Off observations
        outobs.save('test_cta_onoff.xml')

        # Return
        return
# Test class pickeling
def _test_pickeling(self):
"""
Test class pickeling
"""
# Set CALDB
os.environ['CALDB'] = os.environ['TEST_DATA']+'/caldb'
# Perform pickeling tests of empty classes
test_support.pickeling(self, gammalib.GCTAAeff2D())
#test_support.pickeling(self, gammalib.GCTAAeffArf())
#test_support.pickeling(self, gammalib.GCTAAeffPerfTable())
test_support.pickeling(self, gammalib.GCTABackground2D())
test_support.pickeling(self, gammalib.GCTABackground3D())
#test_support.pickeling(self, gammalib.GCTABackgroundPerfTable())
test_support.pickeling(self, gammalib.GCTACubeBackground())
test_support.pickeling(self, gammalib.GCTACubeEdisp())
test_support.pickeling(self, gammalib.GCTACubeExposure())
test_support.pickeling(self, gammalib.GCTACubePsf())
#test_support.pickeling(self, gammalib.GCTACubeSourceDiffuse())
#test_support.pickeling(self, gammalib.GCTACubeSourcePoint())
test_support.pickeling(self, gammalib.GCTAEdisp2D())
#test_support.pickeling(self, gammalib.GCTAEdispPerfTable())
#test_support.pickeling(self, gammalib.GCTAEdispRmf())
test_support.pickeling(self, gammalib.GCTAEventAtom())
test_support.pickeling(self, gammalib.GCTAEventBin())
test_support.pickeling(self, gammalib.GCTAEventCube())
test_support.pickeling(self, gammalib.GCTAEventList())
test_support.pickeling(self, gammalib.GCTAInstDir())
test_support.pickeling(self, gammalib.GCTAModelSkyCube())
test_support.pickeling(self, gammalib.GCTAModelBackground())
test_support.pickeling(self, gammalib.GCTAModelAeffBackground())
test_support.pickeling(self, gammalib.GCTAModelCubeBackground())
test_support.pickeling(self, gammalib.GCTAModelIrfBackground())
test_support.pickeling(self, gammalib.GCTAModelRadialAcceptance())
test_support.pickeling(self, gammalib.GCTAModelRadialGauss())
test_support.pickeling(self, gammalib.GCTAModelRadialPolynom())
test_support.pickeling(self, gammalib.GCTAModelRadialProfile())
test_support.pickeling(self, gammalib.GCTAModelSpatialGradient())
test_support.pickeling(self, gammalib.GCTAModelSpatialMultiplicative())
test_support.pickeling(self, gammalib.GCTAObservation())
test_support.pickeling(self, gammalib.GCTAOnOffObservation())
test_support.pickeling(self, gammalib.GCTAPointing())
test_support.pickeling(self, gammalib.GCTAPsf2D())
test_support.pickeling(self, gammalib.GCTAPsfKing())
#test_support.pickeling(self, gammalib.GCTAPsfPerfTable())
test_support.pickeling(self, gammalib.GCTAPsfTable())
#test_support.pickeling(self, gammalib.GCTAPsfVector())
test_support.pickeling(self, gammalib.GCTAResponseCube())
test_support.pickeling(self, gammalib.GCTAResponseIrf())
test_support.pickeling(self, gammalib.GCTAResponseTable())
test_support.pickeling(self, gammalib.GCTARoi())
# Setup test
list = self._setup_eventlist()
cube = self._setup_eventcube()
atom = list[0]
dir = gammalib.GSkyDir()
instdir = gammalib.GCTAInstDir(dir, 2.0, -3.0)
pivot = gammalib.GEnergy(1.0,'TeV')
plaw = gammalib.GModelSpectralPlaw(1.0,-2.0,pivot)
irfname = os.environ['TEST_DATA']+'/caldb/data/cta/prod2/bcf/North_0.5h/irf_file.fits.gz'
bin = gammalib.GCTAEventBin()
bin.dir(instdir)
bin.energy(pivot)
bin.time(gammalib.GTime(1.0,'sec'))
bin.counts(1.0)
bin.solidangle(0.1)
bin.ewidth(pivot)
bin.ontime(100.0)
bin.weight(1.0)
emin = gammalib.GEnergy(1.0,'TeV')
emax = gammalib.GEnergy(10.0,'TeV')
engs = gammalib.GEnergies(10,emin,emax)
ebds = gammalib.GEbounds(2,emin,emax)
region = gammalib.GSkyRegionCircle(dir, 0.2)
regs = gammalib.GSkyRegions()
regs.append(region)
models = gammalib.GModels(self._data + '/crab_irf.xml')
expcube = gammalib.GCTACubeExposure('CAR','CEL',0.,0.,0.1,0.1,10,10,engs)
psfcube = gammalib.GCTACubePsf('CAR','CEL',0.,0.,0.1,0.1,10,10,engs,1.0,10)
bgdcube = gammalib.GCTACubeBackground('CAR','CEL',0.,0.,0.1,0.1,10,10,ebds)
edispcube = gammalib.GCTACubeEdisp('CAR','CEL',0.,0.,0.1,0.1,10,10,engs,1.0,10)
caldb = gammalib.GCaldb('cta','prod2')
rspirf = gammalib.GCTAResponseIrf('North_0.5h', caldb)
rspcube1 = gammalib.GCTAResponseCube(expcube, psfcube, bgdcube)
rspcube2 = gammalib.GCTAResponseCube(expcube, psfcube, edispcube, bgdcube)
bin.energy(pivot)
obs1 = gammalib.GCTAObservation()
obs1.events(list)
obs1.response(rspirf)
obs2 = gammalib.GCTAObservation()
obs2.events(cube)
obs2.response(rspirf)
obs3 = gammalib.GCTAObservation()
obs3.events(cube)
obs3.response(rspcube1)
obs4 = gammalib.GCTAObservation()
obs4.events(cube)
obs4.response(rspcube2)
obs5 = gammalib.GCTAOnOffObservation(obs1, models, 'Crab', ebds, ebds, regs, regs)
obs6 = gammalib.GCTAOnOffObservation(obs1, obs1, models,
'Crab', ebds, ebds, regs, regs)
radgauss = gammalib.GCTAModelRadialGauss(1.0)
radacc = gammalib.GCTAModelRadialAcceptance(radgauss, plaw)
multi = gammalib.GCTAModelSpatialMultiplicative()
multi.append(radgauss, 'Src1')
multi.append(radgauss, 'Src2')
kingname = self._caldb + '/prod1_king.fits'
hessaeff = self._data + '/irf_hess_aeff.fits.gz'
hesspsf = self._data + '/irf_hess_psf.fits.gz'
hessedisp = self._data + '/irf_hess_edisp.fits.gz'
hessbkg = self._data + '/irf_hess_bkg.fits.gz'
hessbkg2 = self._data + '/irf_hess_bkg2.fits.gz'
hessirf = gammalib.GCTAResponseIrf()
hessirf.load_aeff(hessaeff)
hessirf.load_psf(hesspsf)
hessirf.load_edisp(hessedisp)
hessirf.load_background(hessbkg)
skycube = self._data + '/crab_modcube.fits.gz'
# Perform pickeling tests of filled classes
test_support.pickeling(self, gammalib.GCTAAeff2D(irfname))
#test_support.pickeling(self, gammalib.GCTAAeffArf())
#test_support.pickeling(self, gammalib.GCTAAeffPerfTable())
test_support.pickeling(self, gammalib.GCTABackground2D(hessbkg2))
test_support.pickeling(self, gammalib.GCTABackground3D(irfname))
#test_support.pickeling(self, gammalib.GCTABackgroundPerfTable())
test_support.pickeling(self, gammalib.GCTACubeBackground(bgdcube))
test_support.pickeling(self, gammalib.GCTACubeEdisp(edispcube))
test_support.pickeling(self, gammalib.GCTACubeExposure(expcube))
test_support.pickeling(self, gammalib.GCTACubePsf(psfcube))
#test_support.pickeling(self, gammalib.GCTACubeSourceDiffuse())
#test_support.pickeling(self, gammalib.GCTACubeSourcePoint())
test_support.pickeling(self, gammalib.GCTAEdisp2D(irfname))
#test_support.pickeling(self, gammalib.GCTAEdispPerfTable())
#test_support.pickeling(self, gammalib.GCTAEdispRmf())
test_support.pickeling(self, gammalib.GCTAEventAtom(atom))
test_support.pickeling(self, gammalib.GCTAEventBin(bin))
test_support.pickeling(self, gammalib.GCTAEventCube(cube))
test_support.pickeling(self, gammalib.GCTAEventList(list))
test_support.pickeling(self, gammalib.GCTAInstDir(instdir))
test_support.pickeling(self, gammalib.GCTAModelSkyCube(skycube,plaw))
test_support.pickeling(self, gammalib.GCTAModelBackground(radgauss,plaw))
test_support.pickeling(self, gammalib.GCTAModelAeffBackground(plaw))
test_support.pickeling(self, gammalib.GCTAModelCubeBackground(plaw))
test_support.pickeling(self, gammalib.GCTAModelIrfBackground(plaw))
test_support.pickeling(self, gammalib.GCTAModelRadialAcceptance(radacc))
test_support.pickeling(self, gammalib.GCTAModelRadialGauss(1.0))
test_support.pickeling(self, gammalib.GCTAModelRadialPolynom([1.0,2.0]))
test_support.pickeling(self, gammalib.GCTAModelRadialProfile(1.0,2.0,3.0))
test_support.pickeling(self, gammalib.GCTAModelSpatialGradient(1.0,2.0))
test_support.pickeling(self, gammalib.GCTAModelSpatialMultiplicative(multi))
test_support.pickeling(self, gammalib.GCTAObservation(obs1))
test_support.pickeling(self, gammalib.GCTAObservation(obs2))
test_support.pickeling(self, gammalib.GCTAObservation(obs3))
test_support.pickeling(self, gammalib.GCTAObservation(obs4))
test_support.pickeling(self, gammalib.GCTAOnOffObservation(obs5))
test_support.pickeling(self, gammalib.GCTAOnOffObservation(obs6))
test_support.pickeling(self, gammalib.GCTAPointing(dir))
test_support.pickeling(self, gammalib.GCTAPsf2D(irfname))
test_support.pickeling(self, gammalib.GCTAPsfKing(kingname))
#test_support.pickeling(self, gammalib.GCTAPsfPerfTable())
test_support.pickeling(self, gammalib.GCTAPsfTable(hesspsf))
#test_support.pickeling(self, gammalib.GCTAPsfVector())
test_support.pickeling(self, gammalib.GCTAResponseCube(rspcube1))
test_support.pickeling(self, gammalib.GCTAResponseCube(rspcube2))
test_support.pickeling(self, gammalib.GCTAResponseIrf(rspirf))
test_support.pickeling(self, gammalib.GCTAResponseIrf(hessirf))
#test_support.pickeling(self, gammalib.GCTAResponseTable()) # No constructor
test_support.pickeling(self, gammalib.GCTARoi(instdir,2.0))
# Return
return
# Set test functions
def set(self):
"""
Set all test functions.
"""
# Set test name
self.name('CTA')
# Append tests
self.append(self._test_eventlist_access, 'Test GCTAEventList event access')
self.append(self._test_eventlist_slicing, 'Test GCTAEventList slicing')
self.append(self._test_aeff, 'Test CTA effective area classes')
self.append(self._test_psf, 'Test CTA PSF classes')
self.append(self._test_edisp, 'Test CTA energy dispersion classes')
self.append(self._test_response, 'Test CTA response classes')
self.append(self._test_onoff, 'Test CTA On/Off analysis')
self.append(self._test_pickeling, 'Test CTA class pickeling')
# Return
return
| gammalib/gammalib | inst/cta/test/test_CTA.py | Python | gpl-3.0 | 25,438 |
from localtv.tests.selenium import WebdriverTestCase
from localtv.tests.selenium.pages.front import listing_page
from django.core import management
import datetime
class ListingPages(WebdriverTestCase):
    """Tests for the various listing pages, new, featured and popular.

    Exercises thumbnails, pagination, rss feeds, titles, hover overlays
    and page headings on the 'new', 'popular' and 'featured' listings.
    """
    # Reuse one browser instance across the whole test case for speed.
    NEW_BROWSER_PER_TEST_CASE = False
    @classmethod
    def setUpClass(cls):
        super(ListingPages, cls).setUpClass()
        cls.listing_pg = listing_page.ListingPage(cls)
    def setUp(self):
        super(ListingPages, self).setUp()
        # Reset the search index so only videos created by this test appear.
        self._clear_index()
        self.user = self.create_user(username='autotester',
                                     first_name='webby',
                                     last_name='driver')
    def test_new__thumbs(self):
        """Verify New listing page has expected thumbnails.
        """
        #CREATE 5 REGULAR VIDEOS
        for x in range(5):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              update_index=True)
        self.listing_pg.open_listing_page('new')
        self.assertEqual(True, self.listing_pg.has_thumbnails())
        self.assertEqual(True, self.listing_pg.thumbnail_count(5))
        self.assertEqual(True, self.listing_pg.valid_thumbnail_sizes(162, 117))
    def test_new__pagination(self):
        """New listing page is limited to 15 videos per page.
        """
        #CREATE 45 REGULAR VIDEOS
        for x in range(45):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              update_index=True)
        self.listing_pg.open_listing_page('new')
        self.assertEqual(True, self.listing_pg.has_thumbnails())
        # Only the first page (15 of 45) should be shown.
        self.assertEqual(True, self.listing_pg.thumbnail_count(15))
    def test_featured__pagination(self):
        """Featured listing page is limited to 15 videos per page.
        """
        #CREATE 60 FEATURED VIDEOS
        for x in range(60):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              watches=5,
                              last_featured=datetime.datetime.now(),
                              categories=None,
                              authors=None,
                              tags=None,
                              update_index=True)
        self.listing_pg.open_listing_page('featured')
        self.assertEqual(True, self.listing_pg.has_thumbnails())
        self.assertEqual(True, self.listing_pg.thumbnail_count(15))
    def test_featured__rss(self):
        """Featured listing page rss exists.
        """
        #CREATE 20 FEATURED VIDEOS
        for x in range(20):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              watches=5,
                              last_featured=datetime.datetime.now(),
                              categories=None,
                              authors=None,
                              tags=None,
                              update_index=True)
        # Smoke test: just verifies the rss page opens without error.
        self.listing_pg.open_listing_rss_page('featured')
    def test_popular__thumbs(self):
        """Verify Popular listing page has expected thumbnails.
        """
        #CREATE 5 REGULAR VIDEOS
        for x in range(5):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              watches=0,
                              update_index=True)
        #CREATE 30 POPULAR VIDEOS WITH NUM WATCHES THAT MATCH THE
        #NUM in the VID NAME
        for x in range(11, 41):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              watches=x * 2,
                              update_index=True)
        # Recompute popularity scores so watch counts affect ordering.
        management.call_command('update_popularity')
        self.listing_pg.open_listing_page('popular')
        self.assertEqual(True, self.listing_pg.has_thumbnails())
        self.assertEqual(True, self.listing_pg.valid_thumbnail_sizes(162, 117))
    def test_featured__thumbs(self):
        """Verify Featured listing page has expected thumbnails.
        """
        self.listing_pg = listing_page.ListingPage(self)
        #CREATE 5 REGULAR VIDEOS
        for x in range(5):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              update_index=True)
        #CREATE VIDEOS THAT ARE FEATURED
        for x in range(16, 21):
            vid_name = 'listing_test_' + str(x)
            self.create_video(name=vid_name,
                              last_featured=datetime.datetime.now(),
                              update_index=True)
        self.listing_pg.open_listing_page('featured')
        self.assertEqual(True, self.listing_pg.has_thumbnails())
        self.assertEqual(True, self.listing_pg.valid_thumbnail_sizes(162, 117))
        #Only the 5 Featured Videos should be displayed on the Page
        self.assertEqual(True, self.listing_pg.thumbnail_count(5))
    def test_new__title(self):
        """Verify videos listed have titles that are links to vid page.
        """
        title = 'webdriver test video'
        video = self.create_video(name=title,
                                  description=('This is the most awesome test '
                                               'video ever!'),
                                  user=self.user,
                                  categories=[self.create_category(name='webdriver',
                                              slug='webdriver')])
        self.listing_pg.open_listing_page('new')
        self.assertTrue(self.listing_pg.has_title(title))
        # Clicking the title must navigate to the video's detail page.
        elem = self.browser.find_element_by_css_selector(self.listing_pg._TITLE)
        elem.click()
        self.assertTrue(self.browser.current_url.endswith(video.get_absolute_url()))
    def test_listing__overlay(self):
        """Verify overlay appears on hover and has description text.
        """
        title = 'webdriver test video'
        description = 'This is the most awesome test video ever'
        video = self.create_video(name=title,
                                  description=description)
        self.listing_pg.open_listing_page('new')
        has_overlay, overlay_description = self.listing_pg.has_overlay(video)
        self.assertTrue(has_overlay)
        self.assertIn(description, overlay_description)
    def test_listing__author(self):
        """Verify overlay appears on hover and has author text.
        """
        title = 'webdriver test video'
        description = 'This is the most awesome test video ever'
        video = self.create_video(name=title,
                                  description=description,
                                  authors=[self.user.id],
                                  watches=1)
        self.listing_pg.open_listing_page('popular')
        _, overlay_text = self.listing_pg.has_overlay(video)
        self.assertIn(self.user.get_full_name(), overlay_text)
    def test_new__page_name(self):
        """Verify new page display name on page.
        """
        self.listing_pg.open_listing_page('new')
        self.assertEqual('New Videos', self.listing_pg.page_name())
    def test__new__page_rss(self):
        """Verify new page rss feed url link is present.
        """
        self.listing_pg.open_listing_page('new')
        feed_url = self.base_url + self.listing_pg._FEED_PAGE % 'new'
        self.assertEqual(feed_url, self.listing_pg.page_rss())
    def test_popular__page_name(self):
        """Verify popular page display name on page.
        """
        self.listing_pg.open_listing_page('popular')
        self.assertEqual('Popular Videos', self.listing_pg.page_name())
    def test_listing_popular__page_rss(self):
        """Verify popular page rss feed url link is present.
        """
        self.listing_pg.open_listing_page('popular')
        feed_url = self.base_url + self.listing_pg._FEED_PAGE % 'popular'
        self.assertEqual(feed_url, self.listing_pg.page_rss())
    def test_featured__page_name(self):
        """Verify featured page display name on page.
        """
        self.listing_pg.open_listing_page('featured')
        self.assertEqual('Featured Videos', self.listing_pg.page_name())
    def test_listing_featured__page_rss(self):
        """Verify featured page rss feed url link is present.
        """
        self.listing_pg.open_listing_page('featured')
        feed_url = self.base_url + self.listing_pg._FEED_PAGE % 'featured'
        self.assertEqual(feed_url, self.listing_pg.page_rss())
    def published(self, listing):
        """Verify videos display published date (if configured)."""
        # NOTE(review): intentional placeholder; not collected by the test
        # runner because the name does not start with 'test'.
        assert False, 'this needs to be implemented'
| pculture/mirocommunity | localtv/tests/selenium/test_listing_pages.py | Python | agpl-3.0 | 8,846 |
"""
Utility Routines for Working with Matplotlib Objects
====================================================
"""
import itertools
import io
import base64
import numpy as np
import warnings
import matplotlib
from matplotlib.colors import colorConverter
from matplotlib.path import Path
from matplotlib.markers import MarkerStyle
from matplotlib.transforms import Affine2D
from matplotlib import ticker
# NOTE: bokeh mod
from bokeh.util.dependencies import import_optional
pd = import_optional('pandas')
def color_to_hex(color):
    """Return the hex color code (e.g. '#FF0000') for a matplotlib color.

    ``None`` and fully transparent colors (alpha == 0) map to 'none'.
    """
    if color is None:
        return 'none'
    if colorConverter.to_rgba(color)[3] == 0:
        return 'none'
    channels = [int(255 * c) for c in colorConverter.to_rgb(color)]
    return '#{0:02X}{1:02X}{2:02X}'.format(*channels)
def _many_to_one(input_dict):
"""Convert a many-to-one mapping to a one-to-one mapping"""
return dict((key, val)
for keys, val in input_dict.items()
for key in keys)
# Many-to-one map from matplotlib linestyle specifications to SVG
# stroke-dasharray strings ('none' = solid stroke, None = no stroke).
LINESTYLES = _many_to_one({('solid', '-', (None, None)): 'none',
                           ('dashed', '--'): "6,6",
                           ('dotted', ':'): "2,2",
                           ('dashdot', '-.'): "4,4,2,4",
                           ('', ' ', 'None', 'none'): None})
def get_dasharray(obj):
    """Return the HTML/SVG dash array for a matplotlib line/path object.

    Parameters
    ----------
    obj : matplotlib object
        Must provide ``get_linestyle()`` returning a valid matplotlib
        line style code.

    Returns
    -------
    dasharray : string or None
        The SVG dasharray; falls back to a solid line (with a warning)
        when the line style code is not recognised.
    """
    # A custom dash sequence set directly on the artist takes precedence
    # over the named style.
    dash_seq = obj.__dict__.get('_dashSeq', None)
    if dash_seq is not None:
        return ','.join(str(step) for step in dash_seq)
    ls = obj.get_linestyle()
    dasharray = LINESTYLES.get(ls, 'not found')
    if dasharray == 'not found':
        warnings.warn("line style '{0}' not understood: "
                      "defaulting to solid line.".format(ls))
        dasharray = LINESTYLES['solid']
    return dasharray
# Map from matplotlib Path segment codes to single-character SVG path commands.
PATH_DICT = {Path.LINETO: 'L',
             Path.MOVETO: 'M',
             Path.CURVE3: 'S',
             Path.CURVE4: 'C',
             Path.CLOSEPOLY: 'Z'}
def SVG_path(path, transform=None, simplify=False):
    """Construct the vertices and SVG codes for the path

    Parameters
    ----------
    path : matplotlib.Path object
    transform : matplotlib transform (optional)
        if specified, the path will be transformed before computing the output.

    Returns
    -------
    vertices : array
        The shape (M, 2) array of vertices of the Path. Note that some Path
        codes require multiple vertices, so the length of these vertices may
        be longer than the list of path codes.
    path_codes : list
        A length N list of single-character path codes, N <= M. Each code is
        a single character, in ['L','M','S','C','Z']. See the standard SVG
        path specification for a description of these.
    """
    if transform is not None:
        path = path.transformed(transform)
    vertex_arrays = []
    path_codes = []
    for vertices, path_code in path.iter_segments(simplify=simplify):
        # CLOSEPOLY carries no drawable vertices of its own.
        vertex_arrays.append([] if path_code == Path.CLOSEPOLY else vertices)
        path_codes.append(PATH_DICT[path_code])
    if not path_codes:
        # empty path is a special case
        return np.zeros((0, 2)), []
    flat = list(itertools.chain(*vertex_arrays))
    return np.array(flat).reshape(-1, 2), path_codes
def get_path_style(path, fill=True):
    """Return the style dict (alpha, colors, width, dashes, zorder) for a
    matplotlib path/patch object; facecolor is 'none' when *fill* is False."""
    alpha = path.get_alpha()
    return {'alpha': 1 if alpha is None else alpha,
            'edgecolor': color_to_hex(path.get_edgecolor()),
            'facecolor': (color_to_hex(path.get_facecolor())
                          if fill else 'none'),
            'edgewidth': path.get_linewidth(),
            'dasharray': get_dasharray(path),
            'zorder': path.get_zorder()}
def get_line_style(line):
    """Return the style dict for a matplotlib line object."""
    alpha = line.get_alpha()
    return {'alpha': 1 if alpha is None else alpha,
            'color': color_to_hex(line.get_color()),
            'linewidth': line.get_linewidth(),
            'dasharray': get_dasharray(line),
            'zorder': line.get_zorder()}
def get_marker_style(line):
    """Return the style dict for the markers of a matplotlib line object."""
    alpha = line.get_alpha()
    style = {'alpha': 1 if alpha is None else alpha,
             'facecolor': color_to_hex(line.get_markerfacecolor()),
             'edgecolor': color_to_hex(line.get_markeredgecolor()),
             'edgewidth': line.get_markeredgewidth(),
             'marker': line.get_marker()}
    markerstyle = MarkerStyle(line.get_marker())
    markersize = line.get_markersize()
    # Scale the unit marker path to the marker size; the y-flip matches
    # SVG's downward y axis.
    transform = (markerstyle.get_transform()
                 + Affine2D().scale(markersize, -markersize))
    style['markerpath'] = SVG_path(markerstyle.get_path(), transform)
    style['markersize'] = markersize
    style['zorder'] = line.get_zorder()
    return style
def get_text_style(text):
    """Return the text style dict for a text instance"""
    alpha = text.get_alpha()
    return {'alpha': 1 if alpha is None else alpha,
            'fontsize': text.get_size(),
            'color': color_to_hex(text.get_color()),
            # horizontal alignment: left, center, right
            'halign': text.get_horizontalalignment(),
            # vertical alignment: baseline, center, top
            'valign': text.get_verticalalignment(),
            # alignment of multi-line ('\n') text; private mpl attribute
            'malign': text._multialignment,
            'rotation': text.get_rotation(),
            'zorder': text.get_zorder()}
def get_axis_properties(axis):
    """Return the property dictionary for a matplotlib.Axis instance"""
    props = {}
    # _major_tick_kw is a private matplotlib attribute; label1On tells us
    # on which side the primary tick labels are drawn.
    label1On = axis._major_tick_kw.get('label1On', True)
    if isinstance(axis, matplotlib.axis.XAxis):
        if label1On:
            props['position'] = "bottom"
        else:
            props['position'] = "top"
    elif isinstance(axis, matplotlib.axis.YAxis):
        if label1On:
            props['position'] = "left"
        else:
            props['position'] = "right"
    else:
        raise ValueError("{0} should be an Axis instance".format(axis))
    # Use tick values if appropriate
    locator = axis.get_major_locator()
    props['nticks'] = len(locator())
    if isinstance(locator, ticker.FixedLocator):
        props['tickvalues'] = list(locator())
    else:
        props['tickvalues'] = None
    # Find tick formats; None means "use the renderer's default formatting",
    # "" means "no labels".
    formatter = axis.get_major_formatter()
    if isinstance(formatter, ticker.NullFormatter):
        props['tickformat'] = ""
    elif isinstance(formatter, ticker.FixedFormatter):
        props['tickformat'] = list(formatter.seq)
    elif not any(label.get_visible() for label in axis.get_ticklabels()):
        props['tickformat'] = ""
    else:
        props['tickformat'] = None
    # Get axis scale
    props['scale'] = axis.get_scale()
    # Get major tick label size (assumes that's all we really care about!)
    labels = axis.get_ticklabels()
    if labels:
        props['fontsize'] = labels[0].get_fontsize()
    else:
        props['fontsize'] = None
    # Get associated grid
    props['grid'] = get_grid_style(axis)
    # get axis visibility
    props['visible'] = axis.get_visible()
    return props
def get_grid_style(axis):
    """Return the grid style dict for an axis; {'gridOn': False} when the
    major grid is hidden or has no grid lines."""
    gridlines = axis.get_gridlines()
    # _gridOnMajor is a private matplotlib flag for the major grid.
    if not (axis._gridOnMajor and len(gridlines) > 0):
        return {"gridOn": False}
    first = gridlines[0]
    return dict(gridOn=True,
                color=color_to_hex(first.get_color()),
                dasharray=get_dasharray(first),
                alpha=first.get_alpha())
def get_figure_properties(fig):
    """Return width/height (in inches) and dpi of a matplotlib Figure."""
    props = {}
    props['figwidth'] = fig.get_figwidth()
    props['figheight'] = fig.get_figheight()
    props['dpi'] = fig.dpi
    return props
def get_axes_properties(ax):
    """Return the property dictionary for a matplotlib Axes instance."""
    props = {'axesbg': color_to_hex(ax.patch.get_facecolor()),
             'axesbgalpha': ax.patch.get_alpha(),
             'bounds': ax.get_position().bounds,
             'dynamic': ax.get_navigate(),
             'axison': ax.axison,
             'frame_on': ax.get_frame_on(),
             'patch_visible':ax.patch.get_visible(),
             'axes': [get_axis_properties(ax.xaxis),
                      get_axis_properties(ax.yaxis)]}
    for axname in ['x', 'y']:
        axis = getattr(ax, axname + 'axis')
        domain = getattr(ax, 'get_{0}lim'.format(axname))()
        lim = domain
        if isinstance(axis.converter, matplotlib.dates.DateConverter):
            scale = 'date'
            # Date domains are exported as (year, month, day, h, m, s, ms)
            # tuples; month is zero-based to match JavaScript Date semantics.
            if pd and isinstance(axis.converter, pd.tseries.converter.PeriodConverter):
                _dates = [pd.Period(ordinal=int(d), freq=axis.freq)
                          for d in domain]
                domain = [(d.year, d.month - 1, d.day,
                           d.hour, d.minute, d.second, 0)
                          for d in _dates]
            else:
                domain = [(d.year, d.month - 1, d.day,
                           d.hour, d.minute, d.second,
                           d.microsecond * 1E-3)
                          for d in matplotlib.dates.num2date(domain)]
        else:
            scale = axis.get_scale()
            if scale not in ['date', 'linear', 'log']:
                raise ValueError("Unknown axis scale: "
                                 "{0}".format(axis.get_scale()))
        props[axname + 'scale'] = scale
        props[axname + 'lim'] = lim
        props[axname + 'domain'] = domain
    return props
def iter_all_children(obj, skipContainers=False):
    """Depth-first iterator over all nested children of *obj*, found via
    its ``get_children()`` method.

    When ``skipContainers`` is True, only childless (leaf) objects are
    yielded. Note that with ``skipContainers`` False, leaf children are
    yielded twice (once directly, once by the recursion) — this matches
    the historical behavior of this helper.
    """
    children = obj.get_children() if hasattr(obj, 'get_children') else []
    if len(children) == 0:
        yield obj
        return
    for child in children:
        if not skipContainers:
            yield child
        # could use `yield from` in python 3...
        for grandchild in iter_all_children(child, skipContainers):
            yield grandchild
def get_legend_properties(ax, legend):
    """Return handles, labels and visibility for an axes legend."""
    handles, labels = ax.get_legend_handles_labels()
    return {'handles': handles,
            'labels': labels,
            'visible': legend.get_visible()}
def image_to_base64(image):
    """
    Convert a matplotlib image to a base64 png representation

    Parameters
    ----------
    image : matplotlib image object
        The image to be converted.

    Returns
    -------
    image_base64 : string
        The UTF8-encoded base64 string representation of the png image.
    """
    ax = image.axes
    binary_buffer = io.BytesIO()
    # image is saved in axes coordinates: we need to temporarily
    # set the correct limits to get the correct image
    lim = ax.axis()
    ax.axis(image.get_extent())
    image.write_png(binary_buffer)
    # restore the caller's axis limits before returning
    ax.axis(lim)
    binary_buffer.seek(0)
    return base64.b64encode(binary_buffer.read()).decode('utf-8')
| phobson/bokeh | bokeh/core/compat/mplexporter/utils.py | Python | bsd-3-clause | 11,503 |
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPFound
from provider.utils import get_provider
@view_config(route_name='pyramid-social-auth.auth', request_method='GET')
def auth(request):
    """Start the OAuth flow: build the provider client for the requested
    provider and redirect the user to its authorization URL.

    Raises LookupError when no settings exist for the provider. The CSRF
    token is passed as the OAuth ``state`` parameter.
    """
    provider_name = request.matchdict.get('provider')
    if provider_name not in request.registry.settings:
        raise LookupError('no settings for provider "%s" specified' % provider_name)
    settings = request.registry.settings[provider_name]
    token = request.session.get_csrf_token()
    provider_cls = get_provider(provider_name)
    callback_url = request.route_url('pyramid-social-auth.complete',
                                     provider=provider_name)
    provider = provider_cls(settings['client_id'],
                            settings['client_secret'],
                            request.registry.settings['application_name'],
                            callback_url,
                            state=token)
    return HTTPFound(provider.auth())
@view_config(route_name='pyramid-social-auth.complete', request_method='GET')
def complete(request):
    # TODO: stub — OAuth callback endpoint; exchanging the returned code
    # for an access token is not implemented yet.
    pass
# The MIT License(MIT)
# Copyright (c) 2013-2014 Matt Thomson
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from hamcrest import assert_that, equal_to
from mock import patch, Mock
import pytest
from pyembed.core import discovery
def test_should_find_oembed_urls():
    # With no format specified, both discovered endpoints are returned
    # (json first).
    expected_urls = [
        'http://example.com/oembed?format=json',
        'http://example.com/oembed?format=xml'
    ]
    assert_that(get_oembed_urls(), equal_to(expected_urls))
def test_should_find_oembed_urls_using_json_when_specified():
    expected_url = 'http://example.com/oembed?format=json'
    assert_that(get_oembed_urls(oembed_format='json'), equal_to([expected_url]))
def test_should_find_oembed_urls_using_xml_when_specified():
    expected_url = 'http://example.com/oembed?format=xml'
    assert_that(get_oembed_urls(oembed_format='xml'), equal_to([expected_url]))
def test_should_return_xml_if_json_not_present():
    expected_url = 'http://example.com/oembed?format=xml'
    assert_that(get_oembed_urls(fixture='no_json_oembed.html'),
                equal_to([expected_url]))
def test_should_find_oembed_urls_using_json_with_relative_url():
    # Relative hrefs in the page must be resolved against the page URL.
    expected_url = 'http://example.com/oembed?format=json'
    assert_that(get_oembed_urls(fixture='relative_url.html', oembed_format='json'),
                equal_to([expected_url]))
def test_should_find_oembed_urls_using_xml_with_relative_url():
    expected_url = 'http://example.com/oembed?format=xml'
    assert_that(get_oembed_urls(fixture='relative_url.html', oembed_format='xml'),
                equal_to([expected_url]))
def test_should_return_empty_if_href_not_present():
    assert_that(get_oembed_urls(fixture='json_oembed_no_href.html'), equal_to([]))
def test_should_return_empty_for_invalid_html():
    assert_that(get_oembed_urls(fixture='invalid.html'), equal_to([]))
def test_should_return_empty_on_error_response():
    # HTTP error responses yield no endpoints rather than raising.
    assert_that(get_oembed_urls(ok=False), equal_to([]))
def test_should_throw_error_when_invalid_oembed_format_specified():
    # Only 'json' and 'xml' are valid oembed formats.
    with pytest.raises(discovery.PyEmbedDiscoveryError):
        get_oembed_urls(oembed_format='txt')
def get_oembed_urls(fixture='valid_oembed.html',
                    oembed_format=None,
                    ok=True):
    """Run AutoDiscoverer.get_oembed_urls against an HTML fixture.

    ``requests.get`` is patched so the fixture file is served as the body
    of http://example.com; ``ok`` controls the mocked response status.
    """
    with patch('requests.get') as mock_get:
        response = Mock()
        response.ok = ok
        response.text = open(
            'pyembed/core/test/fixtures/auto_discovery/' + fixture).read()
        mock_get.return_value = response
        result = discovery.AutoDiscoverer().get_oembed_urls(
            'http://example.com', oembed_format)
        # the discoverer must have fetched exactly the page URL
        mock_get.assert_called_with('http://example.com')
        return result
| tino/pyembed | pyembed/core/test/auto_discovery_test.py | Python | mit | 3,677 |
""" Visualize :class:`~pySPACE.resources.data_types.feature_vector.FeatureVector` elements"""
import itertools
import pylab
import numpy
try:
import mdp.nodes
except:
pass
from pySPACE.missions.nodes.base_node import BaseNode
class LLEVisNode(BaseNode):
    """ Show a 2d scatter plot of all :class:`~pySPACE.resources.data_types.feature_vector.FeatureVector` based on Locally Linear Embedding (LLE) from MDP

    This node collects all training examples it obtains along with their
    label. It then computes an embedding of all these examples in a 2d space
    using the "Locally Linear Embedding" algorithm and plots a scatter plot of
    the examples in this space.

    **Parameters**

    :neighbors:
        The number of neighbor vectors that should be considered for each
        instance during locally linear embedding

        (*optional, default: 15*)

    **Exemplary Call**

    .. code-block:: yaml

        -
            node : Time_Series_Source
        -
            node : All_Train_Splitter
        -
            node : Time_Domain_Features
        -
            node : LLE_Vis
            parameters :
                neighbors : 10
        -
            node : Nil_Sink

    Known Issues:
    This node will use pylab.show() to show the figure. There is no store
    method implemented yet. On Macs, pylab.show() might sometimes fail due to
    a wrong plotting backend. A possible workaround in that case is to
    manually set the plotting backend to 'MacOSX'. This has to be done before
    pylab is imported, so one can temporarily add "import matplotlib;
    matplotlib.use('MacOSX')" to the very beginning of launch.py.

    :Author: Jan Hendrik Metzen (jhm@informatik.uni-bremen.de)
    :Created: 2009/07/07
    """
    def __init__(self, neighbors = 15, **kwargs):
        super(LLEVisNode, self).__init__(**kwargs)
        self.set_permanent_attributes(
                  neighbors = neighbors,
                  # A set of colors that can be used to distinguish different classes
                  colors = set(["r", "b"]),
                  # A mapping from class label to its color in the plot
                  class_colors = dict(),
                  # Remembers the classes (colors) of the instances seen
                  instance_colors = [],
                  # The collected training instances (stacked in _stop_training)
                  instances = []
                  )
        pylab.ion()
        figure = pylab.figure(figsize=(21, 11))
        figure.subplots_adjust(left=0.01, bottom=0.01, right=0.99, top= 0.99,
                               wspace=0.2, hspace=0.2)
        pylab.draw()
    def is_trainable(self):
        """ Returns whether this node is trainable. """
        # Though this node is not really trainable, it returns true in order
        # to get trained. The reason is that during this training phase,
        # it visualizes all samples that are passed as arguments
        return True
    def is_supervised(self):
        """ Returns whether this node requires supervised training """
        return True
    def _get_train_set(self, use_test_data):
        """ Returns the data that can be used for training """
        # We take data that is provided by the input node for training
        # NOTE: This might involve training of the preceding nodes
        train_set = self.input_node.request_data_for_training(use_test_data)
        # Add the data provided by the input node for testing to the
        # training set
        # NOTE: This node is not really learning but just collecting all
        #       examples. Because of that it must take
        #       all data for training (even when use_test_data is False)
        train_set = itertools.chain(train_set,
                                    self.input_node.request_data_for_testing())
        return train_set
    def _train(self, data, label):
        """
        This node is not really trained but uses the labeled examples to
        generate a scatter plot.
        """
        # Determine color of this class if not yet done
        if label not in self.class_colors.keys():
            self.class_colors[label] = self.colors.pop()
        # Store the given example along with its class (encoded in the color)
        self.instances.append(data)
        self.instance_colors.append(self.class_colors[label])
    def _stop_training(self, debug=False):
        """ Stops the training, i.e. create the 2d representation

        Uses the Locally Linear Embedding algorithm to create a 2d
        representation of the data and creates a 2d scatter plot.
        """
        instances = numpy.vstack(self.instances)
        # Compute LLE and project the data
        lle_projected_data = mdp.nodes.LLENode(k=self.neighbors,
                                               output_dim=2)(instances)
        # Create scatter plot of the projected data
        pylab.scatter(lle_projected_data[:,0], lle_projected_data[:,1],
                      c = self.instance_colors)
        pylab.show()
    def _execute(self, data):
        # We simply pass the given data on to the next node
        return data
_NODE_MAPPING = {"LLE_Vis": LLEVisNode}
| pyspace/test | pySPACE/missions/nodes/visualization/feature_vector_vis.py | Python | gpl-3.0 | 5,341 |
import os
import subprocess
from deriva.core import format_exception
from launcher.impl import LauncherTask, Task
class SubprocessTask(LauncherTask):
    # Thin marker base class for launcher tasks that wrap an external
    # subprocess; adds no behavior beyond LauncherTask.
    def __init__(self, parent=None):
        super(SubprocessTask, self).__init__(parent)
class ViewerTask(SubprocessTask):
    """Runs the viewer executable on a file in a background launcher task."""

    def __init__(self, executable, is_owner, proc_output_path=None, parent=None):
        # Fixed: previously called super(SubprocessTask, self), which skipped
        # SubprocessTask in the MRO (harmless only because that __init__ is a
        # pure pass-through today).
        super(ViewerTask, self).__init__(parent)
        self.executable = executable            # path of the viewer binary
        self.is_owner = is_owner                # forwarded to set_status
        self.proc_output_path = proc_output_path  # optional log file for stdout/stderr

    def result_callback(self, success, result):
        """Report subprocess success/failure back through set_status."""
        self.set_status(success,
                        "Viewer subprocess execution success" if success else "Viewer subprocess execution failed",
                        "" if success else format_exception(result),
                        self.is_owner)

    def run(self, file_path, working_dir=None, env=None):
        """Start the viewer on *file_path* asynchronously.

        working_dir defaults to the current directory at call time (fixed:
        the old default argument captured os.getcwd() at import time).
        """
        if working_dir is None:
            working_dir = os.getcwd()
        self.task = Task(self._execute,
                         [self.executable, file_path, working_dir, self.proc_output_path, env],
                         self.result_callback)
        self.start()

    @staticmethod
    def _execute(executable, file_path, working_dir, proc_output_path=None, env=None):
        """Run `executable file_path`, wait for it, and raise on non-zero exit.

        Output goes to proc_output_path when it can be opened, otherwise to a
        pipe. NOTE(review): with stdout=PIPE a very chatty viewer could fill
        the pipe buffer and deadlock wait(); confirm viewer output volume.
        """
        out = subprocess.PIPE
        out_file = None
        if proc_output_path:
            try:
                out_file = open(proc_output_path, "wb")
                out = out_file
            except OSError:
                pass  # best-effort logging: fall back to a pipe
        command = [executable, file_path]
        process = subprocess.Popen(command,
                                   cwd=working_dir,
                                   env=env,
                                   stdin=subprocess.PIPE,
                                   stdout=out,
                                   stderr=subprocess.STDOUT)
        ret = process.wait()
        # Fixed: only flush/close when we actually opened a file; the old code
        # called flush() on the PIPE constant and hid it with a bare except.
        if out_file is not None:
            try:
                out_file.flush()
                out_file.close()
            except OSError:
                pass
        del process
        if ret != 0:
            raise RuntimeError('Non-zero viewer exit status %s!' % ret)
| informatics-isi-edu/synspy | launcher/launcher/impl/process_tasks.py | Python | bsd-3-clause | 1,975 |
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from .base import ArticleTestBase
from wiki.models import Article, URLPath
from wiki.plugins.attachments.models import Attachment
__doc__ = """
Tests that the custom queryset methods work, this is important
because the pattern of building them is different from Django
1.5 to 1.6 to 1.7 so there will be 3 patterns in play at the
same time.
"""
class ArticlManagerTests(ArticleTestBase):
    # NOTE(review): class name is missing the 'e' of 'Article'; kept as-is so
    # test selection by dotted name keeps working.
    def test_queryset_methods_directly_on_manager(self):
        self.assertEqual(
            Article.objects.can_read(self.superuser1).count(), 1
        )
        self.assertEqual(
            Article.objects.can_write(self.superuser1).count(), 1
        )
        self.assertEqual(Article.objects.active().count(), 1)
    def test_queryset_methods_on_querysets(self):
        self.assertEqual(
            Article.objects.all().can_read(self.superuser1).count(), 1
        )
        self.assertEqual(
            Article.objects.all().can_write(self.superuser1).count(), 1
        )
        self.assertEqual(Article.objects.all().active().count(), 1)
    # See: https://code.djangoproject.com/ticket/22817
    def test_queryset_empty_querysets(self):
        # Empty (none()) querysets must still expose the custom methods.
        self.assertEqual(
            Article.objects.none().can_read(self.superuser1).count(), 0
        )
        self.assertEqual(
            Article.objects.none().can_write(self.superuser1).count(), 0
        )
        self.assertEqual(Article.objects.none().active().count(), 0)
class AttachmentManagerTests(ArticleTestBase):
    # Same checks as ArticlManagerTests but for Attachment, whose manager is
    # built with the ArticleFkManager pattern; counts are 0 because no
    # attachments exist in the fixture.
    def test_queryset_methods_directly_on_manager(self):
        # Do the same for Attachment which uses ArtickeFkManager
        self.assertEqual(
            Attachment.objects.can_read(self.superuser1).count(), 0
        )
        self.assertEqual(
            Attachment.objects.can_write(self.superuser1).count(), 0
        )
        self.assertEqual(Attachment.objects.active().count(), 0)
    def test_queryset_methods_on_querysets(self):
        self.assertEqual(
            Attachment.objects.all().can_read(self.superuser1).count(), 0
        )
        self.assertEqual(
            Attachment.objects.all().can_write(self.superuser1).count(), 0
        )
        self.assertEqual(Attachment.objects.all().active().count(), 0)
    # See: https://code.djangoproject.com/ticket/22817
    def test_queryset_empty_query_sets(self):
        self.assertEqual(
            Attachment.objects.none().can_read(self.superuser1).count(), 0
        )
        self.assertEqual(
            Attachment.objects.none().can_write(self.superuser1).count(), 0
        )
        self.assertEqual(Attachment.objects.none().active().count(), 0)
class URLPathManagerTests(ArticleTestBase):
    def test_related_manager_works_with_filters(self):
        """Custom queryset methods must also work on related managers."""
        root = URLPath.root()
        active_ids = [child.id for child in root.children.active()]
        self.assertNotIn(root.id, active_ids)
| PolyLAN/django-wiki | wiki/tests/test_managers.py | Python | gpl-3.0 | 2,973 |
#!/usr/bin/python2
from sys import argv
import os.path
from distutils import sysconfig
def getProgramsMenuPath():
    """Return the filesystem location of the Start Menu Programs folder.

    Prefers the all-users (common) folder; on platforms that do not expose
    it (e.g. Win98) falls back to the per-user Programs folder.
    """
    try:
        menu = get_special_folder_path("CSIDL_COMMON_PROGRAMS")
    except OSError:
        # probably Win98 — no common Start Menu available
        menu = get_special_folder_path("CSIDL_PROGRAMS")
    return menu
if argv[1] == '-install':
try:
print "Installing shortcut"
exec_dir=sysconfig.get_config_var("exec_prefix")
print "Python in "+exec_dir
menu_path=getProgramsMenuPath()
print "Programs menu in "+menu_path
#soar_shortcut_path="SoaR.lnk"
soar_shortcut_path=os.path.join(menu_path, "soar.lnk")
print "soar shortcut installing to "+soar_shortcut_path
soar_shortcut_path=os.path.join(menu_path, "soar.lnk")
#CMax_shortcut_path="CMax.lnk"
CMax_shortcut_path=os.path.join(menu_path, "CMax.lnk")
print "CMax shortcut installing to "+CMax_shortcut_path
CMax_shortcut_path=os.path.join(menu_path, "CMax.lnk")
create_shortcut(os.path.join(exec_dir, "python.exe"),
"soar",
soar_shortcut_path,
'-Qnew -c "import form.main;import soar;import soar.application;form.main.Application(soar.application.application)"')
create_shortcut(os.path.join(exec_dir, "python.exe"),
"CMax",
CMax_shortcut_path,
os.path.join(exec_dir, "Lib\site-packages\lib601\CMaxMain.pyc"))
print "Done"
except:
print sys.exc_info()
else:
print "This script is designed to be run from the Windows installer."
| Cynary/distro6.01 | arch/6.01Soft/lib601-F13-4/build/scripts-2.7/installsoar.py | Python | mit | 1,742 |
#!/usr/bin/env python3
"""EOTF functions"""
import eotf_bt1886
import eotf_hlg
import eotf_pq
import eotf_gamma_2_2
import eotf_gamma_2_4
# Registry of the available EOTF (electro-optical transfer function)
# implementation modules.
eotfs = [ eotf_bt1886,
          eotf_hlg,
          eotf_pq,
          eotf_gamma_2_2,
          eotf_gamma_2_4,
        ]
| arvehj/jvcprojectortools | eotf.py | Python | apache-2.0 | 265 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Prefer setuptools; fall back to plain distutils when unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# Long description shown on PyPI: README followed by the changelog.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
    changelog = changelog_file.read()
requirements = [
    # TODO: put package requirements here
]
test_requirements = [
    # TODO: put package test requirements here
]
# All {{ "{{ cookiecutter.* }}".strip() and similar placeholders below are
# rendered by cookiecutter when the project template is instantiated.
setup(
    name='{{ cookiecutter.repo_name }}',
    version='{{ cookiecutter.version }}',
    description="{{ cookiecutter.project_short_description }}",
    long_description=readme + '\n\n' + changelog,
    author="{{ cookiecutter.full_name }}",
    author_email='{{ cookiecutter.email }}',
    url='https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}',
    packages=[
        '{{ cookiecutter.package_name }}',
    ],
    package_dir={'{{ cookiecutter.package_name }}':
                 '{{ cookiecutter.package_name }}'},
    include_package_data=True,
    install_requires=requirements,
    license="MIT",
    zip_safe=False,
    keywords='{{ cookiecutter.package_name }}',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
| zoidbergwill/cookiecutter-demands-pypackage | {{cookiecutter.repo_name}}/setup.py | Python | mit | 1,701 |
def add(s, i, x):
    """Fenwick (binary indexed) tree point update: add x at index i (1-based)."""
    size = len(s)
    while i < size:
        s[i] += x
        # climb to the next node responsible for index i
        i += i & (-i)
def get(s, i):
    """Fenwick tree prefix sum over indices 1..i (1-based)."""
    total = 0
    while i != 0:
        total += s[i]
        # drop the lowest set bit to reach the parent range
        i -= i & (-i)
    return total
def find(s, k):
    """Binary search over the Fenwick tree `s`.

    Returns the smallest index m such that fewer than k counted positions
    lie strictly above m, i.e. total - prefix(m) < k.
    """
    total = get(s, len(s) - 1)
    lo, hi = 0, len(s)
    while lo < hi:
        mid = (lo + hi) // 2
        if total - get(s, mid) >= k:
            lo = mid + 1
        else:
            hi = mid
    return hi
def findR(s, k):
    """Fenwick-tree binary descent, equivalent to find() but O(log n).

    Returns the smallest 1-based index m such that fewer than k counted
    positions lie strictly above m; returns 0 when the whole tree holds
    fewer than k entries.
    """
    n = len(s)
    tt = get(s, n-1)
    # tk = budget of counted positions allowed at or below the answer.
    tk = tt - k
    if tk < 0:
        return 0
    i = 0
    w = 1
    # Start with the largest power-of-two step strictly below n.
    while w * 2 < n:
        w *= 2
    while w > 0:
        # Shrink the step until i+w is in range and taking its subtree
        # sum would not overshoot the remaining budget.
        while i+w >= n or s[i+w] > tk:
            w //= 2
            if w == 0:
                break
        if w == 0:
            break
        tk -= s[i+w]
        i += w
        w //= 2
    #print("findR tk:", tk, i+1)
    # i is the largest index whose prefix sum fits in the budget.
    return i+1
class Solution:
    def subarraysWithKDistinct(self, A: List[int], K: int) -> int:
        """LeetCode 992: count subarrays of A with exactly K distinct values.

        Sweeps the left endpoint i from right to left.  A Fenwick tree over
        1-based positions marks the first occurrence of each distinct value
        in the suffix A[i:].  For each i, subarrays with exactly K distinct
        values = (positions with >= K distinct) - (positions with >= K+1).
        Assumes values of A lie in 0..n (LeetCode guarantees 1 <= A[i] <= n).
        """
        n = len(A)
        #print(n)
        # pre[i]: previous index holding the same value as A[i], or -1.
        pre = [-1 for _ in range(n+10)]
        # lst[v]: most recent index where value v was seen, or -1.
        lst = [-1 for _ in range(n+10)]
        for i in range(n):
            pre[i] = lst[A[i]]
            lst[A[i]] = i
        # Fenwick tree: 1 at each position that is the last occurrence of
        # its value (= first occurrence when scanning any suffix).
        s = [0 for _ in range(n+10)]
        for i in range(n+1):
            if lst[i] == -1:
                continue
            add(s, lst[i]+1, 1)
        ans = 0
        for i in range(n-1, -1, -1):
            #print("i:", i)
            #for j in range(n+1):
            #    print(j, get(s, j))
            #print("findR:", findR(s, K), findR(s, K+1))
            #print("find:", find(s, K), find(s, K+1))
            # Fewer than K distinct values remain in A[i:]: no more answers.
            if get(s, len(s) - 1) < K:
                break
            ans += findR(s, K) - findR(s, K+1)
            # Remove i as a first occurrence; its duplicate predecessor (if
            # any) becomes the first occurrence for the next, shorter suffix.
            add(s, i+1, -1)
            if pre[i] != -1:
                add(s, pre[i]+1, 1)
        return ans
| FiveEye/ProblemSet | LeetCode/lc992.py | Python | mit | 1,820 |
# -*- coding: utf-8 -*-
from argh.decorators import arg
from lain_admin_cli.helpers import Node, Container, is_backupd_enabled
from lain_admin_cli.helpers import yes_or_no, info, error, warn, _yellow, volume_dir
from subprocess import check_output, check_call, CalledProcessError
import requests, os, json, time
@arg('-p', '--playbooks', required=True)
@arg('--with-volume')
@arg('--ignore-volume')
@arg('-t', '--target')
@arg('containers', nargs='+')
def drift(containers, with_volume=False, ignore_volume=False, playbooks="", target=""):
    """Drift (migrate) the given containers to another node.

    containers -- container specs understood by the Container helper
    with_volume -- also move the containers' lain volumes (requires --target)
    ignore_volume -- drift containers that have volumes without moving them
    playbooks -- path to the ansible playbooks directory
    target -- destination node name; empty lets deployd pick a node
    """
    if with_volume and ignore_volume:
        error("--with-volume and --ignore-volume is mutual option")
        return
    target = Node(target) if target != "" else None
    try:
        containers = [Container(c) for c in containers]
        # NOTE(review): `nodes` is unused afterwards; building it appears to
        # validate each container's host early — confirm Node() raises on
        # unknown hosts.
        nodes = [Node(c.host) for c in containers]
    except Exception as e:
        error("Fail to get container or node info, %s" % (str(e)))
        return
    info("Drifting %s to %s", ["%s/%s" % (c.host, c.name) for c in containers],
         target.name if target else "a random node")
    if not yes_or_no("Are you sure?", default='no', color=_yellow):
        return
    for container in containers:
        # Containers with lain volumes need an explicit volume policy.
        if len(container.volumes) > 0:
            if not (with_volume or ignore_volume):
                warn("container %s having lain volumes,"
                     "you may need run `drift --ignore-volume[--with-volume] ...` to drift it,"
                     "ignore this container." % container.name)
                continue
            if not target and with_volume:
                warn("container %s having lain volumes, target node required to drift with volume." % container.name)
                warn("run `drift --with-volume -t[--target] somenode ...`")
                continue
        node = Node(container.host)
        drift_container(node, container, target, playbooks, with_volume, ignore_volume)
        # Keep the backup daemon's metadata in sync with the new node.
        if len(container.volumes) > 0 and is_backupd_enabled():
            fix_backupd(container, node, target)
def fix_backupd(container, source, target):
    """Migrate backup files and metadata for `container`'s volumes from
    `source` node to `target` node after a drift.

    Reads the per-node ``.meta`` JSON index under /mfs/lain/backup/<ip>/,
    copies each volume's backup archives into the target node's directory
    and rewrites the target's index.  Best effort: individual copy
    failures are reported and skipped.

    NOTE(review): callers must pass a concrete target node; a None target
    (e.g. deployd-scheduled drift) would fail on ``target.ip`` — confirm
    upstream guarantees.
    """
    # Load the target node's backup index, defaulting to empty.
    try:
        tf = open("/mfs/lain/backup/%s/.meta" % target.ip, 'rb')
    except IOError:
        target_meta = {}
    else:
        target_meta = json.loads(tf.read())
        tf.close()
    # Without a source index there is nothing to migrate.
    try:
        sf = open("/mfs/lain/backup/%s/.meta" % source.ip, 'rb')
    except IOError:
        return  # backup file do not exist
    else:
        source_meta = json.loads(sf.read())
        sf.close()
    changed = False
    for volume in container.volumes:
        data = source_meta.get(volume, None)
        if not data:
            continue
        target_meta[volume] = []
        for item in data:
            info("Fix backup for %s" % item['name'])
            source_file = "/mfs/lain/backup/%s/%s" % (source.ip, item['name'])
            target_file = "/mfs/lain/backup/%s/%s" % (target.ip, item['name'])
            try:
                cmd = 'mkdir -p /mfs/lain/backup/%s && cp -r %s %s' % (target.ip, source_file, target_file)
                check_call(['/bin/bash', '-c', cmd])
            except CalledProcessError as e:
                error(str(e))
                warn("You may need to move %s to %s by hands" % (source_file, target_file))
                continue
            else:
                # Only record entries whose files actually made it over.
                target_meta[volume].append(item)
                changed = True
    if changed:
        try:
            tf = open("/mfs/lain/backup/%s/.meta" % target.ip, 'w+')
            tf.write(json.dumps(target_meta))
            tf.close()
        except IOError as e:
            warn(str(e))
            # BUG FIX: the original format string had no %s placeholder, so
            # `"..." % target.ip` raised TypeError and masked the IOError.
            warn("Fail to create meta on %s, check this by hand" % target.ip)
def drift_volumes(playbooks_path, containers, source, target):
    """Run the ansible 'drift' role to copy the containers' lain volumes
    from `source` node to `target` node.

    Writes the volume list and container ids to a temp variable file the
    playbook consumes, then removes it.
    NOTE(review): relies on the Python 2 builtin `reduce` and on writing a
    str to a file opened in 'wb' mode — this module targets Python 2.
    """
    volumes = reduce(lambda x, y: x + y.volumes, containers, [])
    ids = reduce(lambda x, y: x + [y.info['Id']], containers, [])
    var_file = "/tmp/ansible-variables"
    with open(var_file, 'wb') as f:
        f.write('{"volumes":%s,"ids":"%s"}'%(json.dumps(volumes), ' '.join(ids)))
    cmd = ['ansible-playbook', '-i', os.path.join(playbooks_path, 'cluster')]
    cmd += ['-e', 'target=nodes']
    cmd += ['-e', 'target_node=%s'%target.name]
    cmd += ['-e', 'from_node=%s'%source.name]
    cmd += ['-e', 'from_ip=%s'%source.ip]
    cmd += ['-e', 'role=drift']
    cmd += ['-e', 'var_file=%s'%var_file]
    cmd += [os.path.join(playbooks_path, 'role.yaml')]
    info('cmd is: %s', ' '.join(cmd))
    check_call(cmd)
    os.remove(var_file)
def warm_up_on_target(playbooks_path, containers, target):
    """Run the ansible 'drift-warm-up' role on `target` for the images of
    the given containers, so the subsequent drift starts quickly."""
    images = []
    for c in containers:
        images.append(c.info['Config']['Image'])
    inventory = os.path.join(playbooks_path, 'cluster')
    playbook = os.path.join(playbooks_path, 'role.yaml')
    cmd = ['ansible-playbook', '-i', inventory,
           '-e', 'target=nodes',
           '-e', 'target_node=%s' % target.name,
           '-e', 'role=drift-warm-up',
           '-e', 'to_drift_images=%s' % images,
           playbook]
    info('cmd is: %s', ' '.join(cmd))
    check_call(cmd)
def drift_container(from_node, container, to_node, playbooks_path, with_volume, ignore_volume):
    """Drift one container from `from_node` to `to_node`.

    Steps: safety checks for deployd/webrouter, image warm-up on the
    target, optional volume move (copy, stop, copy again), a PATCH to the
    deployd drift API, then polling swarm until the re-scheduled container
    appears.  `to_node` may be None — deployd then picks a node.
    """
    if container.appname == 'deploy':
        # Refuse to drift the last deployd instance: a live deployd is
        # needed to reschedule the container being drifted.
        key = '/lain/deployd/pod_groups/deploy/deploy.web.web'
        data = json.loads(check_output(['etcdctl', 'get', key]))
        if len(data['Pods']) <= 1:
            warn("Deployd is not HA now, can not drift it."
                 "you should scale it to 2+ instance first."
                 "ignore container %s" % container.name)
            return
    elif container.appname == 'webrouter':
        if not yes_or_no("Make sure %s exist on %s" % (container.info['Config']['Image'], to_node.name),
                         default='no', color=_yellow):
            return
    url = "http://deployd.lain:9003/api/nodes?cmd=drift&from=%s&pg=%s&pg_instance=%s" % (
        from_node.name, container.podname, container.instance
    )
    url += "&force=true" if with_volume or ignore_volume else ""
    url += "&to=%s" % to_node.name if to_node else ""
    if to_node:
        ## Warm-up on target node
        info("Warm-up on target node...")
        warm_up_on_target(playbooks_path, [container], to_node)
    else:
        info("No specified target node, skip warm-up...")
    ## Drift volumes
    if with_volume and len(container.volumes) > 0:
        info("Drift the volume...")
        drift_volumes(playbooks_path, [container], from_node, to_node)
        info("Stop the container %s" % container.name)
        try:
            check_output(['docker', '-H', 'swarm.lain:2376', 'stop', container.info['Id']])
        except CalledProcessError:
            # container may not existed now, removed by deployd, ignore errors
            error("Fail to stop the container %s" % container.name)
            return
        # Second copy picks up data written between the first copy and the stop.
        info("Drift the volume again...")
        drift_volumes(playbooks_path, [container], from_node, to_node)
    ## Call deployd api
    info("PATCH %s" % url)
    resp = requests.patch(url)
    if resp.status_code >= 300:
        error("Deployd drift api response a error, %s." % resp.text)
    ## waiting for deployd complete
    # Name of the re-scheduled container: deployd bumps the drift counter.
    drifted_container_name = "%s.%s.%s.v%s-i%s-d%s" % (
        container.appname, container.proctype, container.procname,
        container.version, container.instance, container.drift+1
    )
    print(">>>(need some minutes)Waiting for deployd drift %s to %s..." % (container.name, drifted_container_name))
    # Poll swarm until the drifted container can be inspected.
    # NOTE(review): polls forever if deployd never schedules the instance.
    while True:
        try:
            output = check_output(['docker', '-H', 'swarm.lain:2376', 'inspect', drifted_container_name])
        except CalledProcessError:
            time.sleep(3)
        else:
            new_container = json.loads(output)[0]
            info("%s/%s => %s%s drifted success" % (container.host, container.name,
                                                    new_container['Node']['Name'],
                                                    new_container['Name']))
            break
| laincloud/lainctl | lain_admin_cli/drift.py | Python | mit | 7,980 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the optional 'slack_nickname' text field to UserProfile."""

    dependencies = [
        ('users', '0004_auto_20141104_2241'),
    ]
    operations = [
        migrations.AddField(
            model_name='userprofile',
            name='slack_nickname',
            field=models.CharField(max_length=255, null=True, blank=True),
            preserve_default=True,
        ),
    ]
| Stupeflix/japper | japper/users/migrations/0005_userprofile_slack_nickname.py | Python | mpl-2.0 | 470 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from warehouse.admin.services import ISponsorLogoStorage
from warehouse.utils.static import ManifestCacheBuster
def includeme(config):
    """Pyramid ``includeme`` hook wiring up the admin sub-application:
    sponsor-logo storage service, Jinja2 templates, static assets with
    cache busting and whitenoise, plus admin routes and feature flags.
    """
    # Storage backend class is configured by dotted path in settings.
    sponsorlogos_storage_class = config.maybe_dotted(
        config.registry.settings["sponsorlogos.backend"]
    )
    config.register_service_factory(
        sponsorlogos_storage_class.create_service, ISponsorLogoStorage
    )
    # Setup Jinja2 Rendering for the Admin application
    config.add_jinja2_search_path("templates", name=".html")
    # Setup our static assets
    prevent_http_cache = config.get_settings().get("pyramid.prevent_http_cache", False)
    config.add_static_view(
        "admin/static",
        "warehouse.admin:static/dist",
        # Don't cache at all if prevent_http_cache is true, else we'll cache
        # the files for 10 years.
        cache_max_age=0 if prevent_http_cache else 10 * 365 * 24 * 60 * 60,
    )
    config.add_cache_buster(
        "warehouse.admin:static/dist/",
        ManifestCacheBuster(
            "warehouse.admin:static/dist/manifest.json",
            reload=config.registry.settings["pyramid.reload_assets"],
            strict=not prevent_http_cache,
        ),
    )
    config.whitenoise_add_files("warehouse.admin:static/dist/", prefix="/admin/static/")
    config.whitenoise_add_manifest(
        "warehouse.admin:static/dist/manifest.json", prefix="/admin/static/"
    )
    # Add our routes
    config.include(".routes")
    # Add our flags
    config.include(".flags")
| pypa/warehouse | warehouse/admin/__init__.py | Python | apache-2.0 | 2,024 |
# -*- coding:utf-8 -*-
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110- 1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# ----------------------------------------------------------
# Author: Stephen Leger (s-leger)
#
# ----------------------------------------------------------
# noinspection PyUnresolvedReferences
import bpy
import time
# noinspection PyUnresolvedReferences
from bpy.types import Operator, PropertyGroup, Mesh, Panel
from bpy.props import (
FloatProperty, BoolProperty, IntProperty,
StringProperty, EnumProperty,
CollectionProperty
)
from .bmesh_utils import BmeshEdit as bmed
from random import randint
import bmesh
from mathutils import Vector, Matrix
from math import sin, cos, pi, atan2, sqrt, tan
from .archipack_manipulator import Manipulable, archipack_manipulator
from .archipack_2d import Line, Arc
from .archipack_preset import ArchipackPreset, PresetMenuOperator
from .archipack_object import ArchipackCreateTool, ArchipackObject
from .archipack_cutter import (
CutAblePolygon, CutAbleGenerator,
ArchipackCutter,
ArchipackCutterPart
)
from .archipack_polylines import Io, ShapelyOps
from .archipack_dimension import DimensionProvider
class Roof():
    """Mixin carrying per-segment roof data shared by StraightRoof and
    CurvedRoof: tree linkage (v0_idx/v1_idx), constraint type, and the
    width/slope/auto settings of the left and right pans."""
    def __init__(self):
        # angle from parent segment at the node
        self.angle_0 = 0
        # indices of start / end nodes in the axis tree
        self.v0_idx = 0
        self.v1_idx = 0
        # 'HORIZONTAL' or 'SLOPE' (see RoofAxisNode.add); None until set
        self.constraint_type = None
        # z drop per unit of side distance (see RoofPolygon.altitude)
        self.slope_left = 1
        self.slope_right = 1
        # pan widths on each side of the axis
        self.width_left = 1
        self.width_right = 1
        # width/slope constraint mode (consumed by RoofPolygon)
        self.auto_left = 'AUTO'
        self.auto_right = 'AUTO'
        self.type = 'SIDE'
        # force hip or valley
        self.enforce_part = 'AUTO'
        self.triangular_end = False
        # seg is part of hole
        self.is_hole = False
    def copy_params(self, s):
        """Copy all roof-specific attributes onto segment s."""
        s.angle_0 = self.angle_0
        s.v0_idx = self.v0_idx
        s.v1_idx = self.v1_idx
        s.constraint_type = self.constraint_type
        s.slope_left = self.slope_left
        s.slope_right = self.slope_right
        s.width_left = self.width_left
        s.width_right = self.width_right
        s.auto_left = self.auto_left
        s.auto_right = self.auto_right
        s.type = self.type
        s.enforce_part = self.enforce_part
        s.triangular_end = self.triangular_end
        # segment is part of hole / slice
        s.is_hole = self.is_hole
    @property
    def copy(self):
        """Copy of this segment as a StraightRoof carrying the same params."""
        s = StraightRoof(self.p.copy(), self.v.copy())
        self.copy_params(s)
        return s
    def straight(self, length, t=1):
        """Straight copy of given length starting at parameter t."""
        s = self.copy
        s.p = self.lerp(t)
        s.v = self.v.normalized() * length
        return s
    def set_offset(self, offset, last=None):
        """
        Offset line and compute intersection point
        between segments
        """
        self.line = self.make_offset(offset, last)
    def offset(self, offset):
        """Copy of this segment shifted sideways by `offset`."""
        o = self.copy
        o.p += offset * self.cross_z.normalized()
        return o
    @property
    def oposite(self):
        """Copy with start/end swapped (direction reversed)."""
        o = self.copy
        o.p += o.v
        o.v = -o.v
        return o
    @property
    def t_diff(self):
        # parametric extent (curved segments)
        return self.t_end - self.t_start
    def straight_roof(self, a0, length):
        """New StraightRoof continuing from this segment's end, rotated by a0."""
        s = self.straight(length).rotate(a0)
        r = StraightRoof(s.p, s.v)
        r.angle_0 = a0
        return r
    def curved_roof(self, a0, da, radius):
        """New CurvedRoof tangent to this segment's end, rotated by a0,
        sweeping angle da with the given radius."""
        n = self.normal(1).rotate(a0).scale(radius)
        if da < 0:
            n.v = -n.v
        c = n.p - n.v
        r = CurvedRoof(c, radius, n.angle, da)
        r.angle_0 = a0
        return r
class StraightRoof(Roof, Line):
    """Straight roof axis segment: Line geometry plus Roof parameters."""

    def __init__(self, p, v):
        Line.__init__(self, p, v)
        Roof.__init__(self)

    def __str__(self):
        return "p0:{} p1:{}".format(self.p0, self.p1)
class CurvedRoof(Roof, Arc):
    """Arc-shaped roof axis segment: Arc geometry plus Roof parameters."""

    def __init__(self, c, radius, a0, da):
        Arc.__init__(self, c, radius, a0, da)
        Roof.__init__(self)

    def __str__(self):
        return "t_start:{} t_end:{} dist:{}".format(self.t_start, self.t_end, self.dist)
class RoofSegment():
    """
    Roof part with 2 polygons
    and "axis" StraightRoof segment
    """
    def __init__(self, seg, left, right):
        # axis segment shared by the two sides
        self.seg = seg
        # left / right neighbours along the axis (presumably RoofPolygon
        # pans — see RoofAxisNode.add callers)
        self.left = left
        self.right = right
        # angle at the node (set by RoofAxisNode.add)
        self.a0 = 0
        # True when the segment points toward the node (set by RoofAxisNode.add)
        self.reversed = False
class RoofAxisNode():
    """
    Connection between parts
    for radial analysis

    Holds the RoofSegment links meeting at a single node, sortable ccw by
    their angle a0; `center` caches the index of the root segment.
    """
    def __init__(self):
        # axis segments
        self.segs = []
        self.root = None
        self.center = 0
        # store count of horizontal segs
        self.n_horizontal = 0
        # store count of slopes segs
        self.n_slope = 0

    @property
    def count(self):
        """Number of segments meeting at this node."""
        return len(self.segs)

    @property
    def last(self):
        """
        last segments in this node
        """
        return self.segs[-1]

    def left(self, index):
        """Neighbour segment ccw from index (wraps around)."""
        if index + 1 >= self.count:
            return self.segs[0]
        return self.segs[index + 1]

    def right(self, index):
        """Neighbour segment cw from index (wraps via negative indexing)."""
        return self.segs[index - 1]

    def add(self, a0, reversed, seg, left, right):
        """Register a segment at this node.

        a0 -- angle of the segment at the node
        reversed -- True when seg points toward the node (marks the root)
        seg -- axis segment; left / right -- adjacent pans
        """
        if seg.constraint_type == 'HORIZONTAL':
            self.n_horizontal += 1
        elif seg.constraint_type == 'SLOPE':
            self.n_slope += 1
        s = RoofSegment(seg, left, right)
        s.a0 = a0
        s.reversed = reversed
        if reversed:
            self.root = s
        self.segs.append(s)

    def update_center(self):
        """Cache the index of the root segment in segs."""
        for i, s in enumerate(self.segs):
            if s is self.root:
                self.center = i
                return

    def partition(self, array, begin, end):
        """Lomuto quicksort partition step.

        Kept for backward compatibility; no longer used by sort().
        """
        pivot = begin
        for i in range(begin + 1, end + 1):
            if array[i].a0 < array[begin].a0:
                pivot += 1
                array[i], array[pivot] = array[pivot], array[i]
        array[pivot], array[begin] = array[begin], array[pivot]
        return pivot

    # sort tree segments by angle
    def sort(self):
        """Sort segments ascending by angle a0 and refresh the root index.

        Uses the built-in stable list.sort instead of the previous
        hand-rolled recursive quicksort, which degraded to O(n^2) and deep
        recursion on already-sorted input.
        """
        self.segs.sort(key=lambda s: s.a0)
        # index of root in segs array
        self.update_center()
class RoofPolygon(CutAblePolygon):
    """
    One roof pan (pitch): a ccw 2d boundary closed by an explicit
    "bottom" segment, hanging on one side ('LEFT'/'RIGHT') of an axis
    segment.

    Triangular shapes (zero-length axis) are handled through a
    unit-length "fake_axis" used for all distance computations.  Pans are
    chained along the axis via .last / .next; "node" refers to the axis
    start side, "next" to the axis end side.
    """
    def __init__(self, axis, side, fake_axis=None):
        """
        Create a default rectangle
        axis from node to next
        slope float -z for 1 in side direction
        side in ['LEFT', 'RIGHT'] in axis direction
        NOTE:
        when axis length is null (eg: triangular shape)
        use "fake_axis" with a 1 length to handle
        distance from segment
        """
        if side == 'LEFT':
            # slope
            self.slope = axis.slope_left
            # width
            self.width = axis.width_left
            # constraint width
            self.auto_mode = axis.auto_left
        else:
            # slope
            self.slope = axis.slope_right
            # width
            self.width = axis.width_right
            # constraint width
            self.auto_mode = axis.auto_right
        self.side = side
        # backward deps
        self.backward = False
        # pointers to neighboors along axis
        self.last = None
        self.next = None
        self.other_side = None
        # axis segment
        if side == 'RIGHT':
            self.axis = axis.oposite
        else:
            self.axis = axis
        self.fake_axis = None
        # _axis is either a fake one or real one
        # to prevent further check
        if fake_axis is None:
            self._axis = self.axis
            self.fake_axis = self.axis
            self.next_cross = axis
            self.last_cross = axis
        else:
            if side == 'RIGHT':
                self.fake_axis = fake_axis.oposite
            else:
                self.fake_axis = fake_axis
            self._axis = self.fake_axis
        # unit vector perpendicular to axis
        # looking at outside part
        v = self.fake_axis.sized_normal(0, -1)
        self.cross = v
        self.next_cross = v
        self.last_cross = v
        self.convex = True
        # segments from axis end in ccw order
        # closed by explicit segment
        self.segs = []
        # holes
        self.holes = []
        # Triangular ends
        self.node_tri = False
        self.next_tri = False
        self.is_tri = False
        # sizes
        self.tmin = 0
        self.tmax = 1
        self.dt = 1
        self.ysize = 0
        self.xsize = 0
        self.vx = Vector()
        self.vy = Vector()
        self.vz = Vector()
    def move_node(self, p):
        """
        Move slope point in node side
        """
        if self.side == 'LEFT':
            self.segs[-1].p0 = p
            self.segs[2].p1 = p
        else:
            self.segs[2].p0 = p
            self.segs[1].p1 = p
    def move_next(self, p):
        """
        Move slope point in next side
        """
        if self.side == 'LEFT':
            self.segs[2].p0 = p
            self.segs[1].p1 = p
        else:
            self.segs[-1].p0 = p
            self.segs[2].p1 = p
    def node_link(self, da):
        """Tag the node-side link segment as LINK / LINK_VALLEY / LINK_HIP
        depending on the half angle da between neighbour axes."""
        angle_90 = round(pi / 2, 4)
        if self.side == 'LEFT':
            idx = -1
        else:
            idx = 1
        da = abs(round(da, 4))
        type = "LINK"
        if da < angle_90:
            type += "_VALLEY"
        elif da > angle_90:
            type += "_HIP"
        self.segs[idx].type = type
    def next_link(self, da):
        """Tag the next-side link segment as LINK / LINK_VALLEY / LINK_HIP
        depending on the half angle da between neighbour axes."""
        angle_90 = round(pi / 2, 4)
        if self.side == 'LEFT':
            idx = 1
        else:
            idx = -1
        da = abs(round(da, 4))
        type = "LINK"
        if da < angle_90:
            type += "_VALLEY"
        elif da > angle_90:
            type += "_HIP"
        self.segs[idx].type = type
    def bind(self, last, ccw=False):
        """
        always in axis real direction

        Chain this pan after `last`, propagate width/slope constraints,
        build both pans' segments and tag link segments as hip/valley
        from the half angle between the two axes.
        """
        # backward dependancy relative to axis
        if last.backward:
            self.backward = self.side == last.side
        if self.side == last.side:
            last.next_cross = self.cross
        else:
            last.last_cross = self.cross
        self.last_cross = last.cross
        # axis of last / next segments
        if self.backward:
            self.next = last
            last.last = self
        else:
            self.last = last
            last.next = self
        # width auto
        if self.auto_mode == 'AUTO':
            self.width = last.width
            self.slope = last.slope
        elif self.auto_mode == 'WIDTH' and self.width != 0:
            self.slope = last.slope * last.width / self.width
        elif self.auto_mode == 'SLOPE' and self.slope != 0:
            self.width = last.width * last.slope / self.slope
        self.make_segments()
        last.make_segments()
        res, p, t = self.segs[2].intersect(last.segs[2])
        if res:
            # dont move anything when no intersection found
            # aka when delta angle == 0
            self.move_node(p)
            if self.side != last.side:
                last.move_node(p)
            else:
                last.move_next(p)
        # Free mode
        # move border
        # and find intersections
        # with sides
        if self.auto_mode == 'ALL':
            s0 = self._axis.offset(-self.width)
            res, p0, t = self.segs[1].intersect(s0)
            if res:
                self.segs[2].p0 = p0
                self.segs[1].p1 = p0
            res, p1, t = self.segs[-1].intersect(s0)
            if res:
                self.segs[2].p1 = p1
                self.segs[-1].p0 = p1
        #       /\
        #       |   angle
        #       |____>
        #
        # v1 node -> next
        if self.side == 'LEFT':
            v1 = self._axis.v
        else:
            v1 = -self._axis.v
        if last.side == self.side:
            # contigous, v0 node <- next
            # half angle between segments
            if self.side == 'LEFT':
                v0 = -last._axis.v
            else:
                v0 = last._axis.v
            da = v0.angle_signed(v1)
            if ccw:
                if da < 0:
                    da = 2 * pi + da
            elif da > 0:
                da = da - 2 * pi
            last.next_link(0.5 * da)
        else:
            # alternate v0 node -> next
            # half angle between segments
            if last.side == 'LEFT':
                v0 = last._axis.v
            else:
                v0 = -last._axis.v
            da = v0.angle_signed(v1)
            # angle always ccw
            if ccw:
                if da < 0:
                    da = 2 * pi + da
            elif da > 0:
                da = da - 2 * pi
            last.node_link(0.5 * da)
            self.node_link(-0.5 * da)
    def next_seg(self, index):
        # get_index presumably wraps the index (inherited from
        # CutAblePolygon) — confirm in archipack_cutter.
        idx = self.get_index(index + 1)
        return self.segs[idx]
    def last_seg(self, index):
        return self.segs[index - 1]
    def make_segments(self):
        """Build the default rectangular boundary: axis, two sides and a
        bottom segment (only when segs is still empty)."""
        if len(self.segs) < 1:
            s0 = self._axis
            w = self.width
            s1 = s0.straight(w, 1).rotate(pi / 2)
            s1.type = 'SIDE'
            s3 = s0.straight(w, 0).rotate(pi / 2).oposite
            s3.type = 'SIDE'
            s2 = StraightRoof(s1.p1, s3.p0 - s1.p1)
            s2.type = 'BOTTOM'
            self.segs = [s0, s1, s2, s3]
    def move_side(self, pt):
        """
        offset side to point
        """
        s2 = self.segs[2]
        d0, t = self.distance(s2.p0)
        d1, t = self.distance(pt)
        # adjust width and slope according
        self.width = d1
        self.slope = self.slope * d0 / d1
        self.segs[2] = s2.offset(d1 - d0)
    def propagate_backward(self, pt):
        """
        Propagate slope, keep 2d angle of slope
        Move first point and border
        keep border parallel
        adjust slope
        and next shape
        """
        # distance of p
        # offset side to point
        self.move_side(pt)
        # move verts on node side
        self.move_next(pt)
        if self.side == 'LEFT':
            # move verts on next side
            res, p, t = self.segs[-1].intersect(self.segs[2])
        else:
            # move verts on next side
            res, p, t = self.segs[1].intersect(self.segs[2])
        if res:
            self.move_node(p)
        if self.next is not None and self.next.auto_mode in {'AUTO'}:
            self.next.propagate_backward(p)
    def propagate_forward(self, pt):
        """
        Propagate slope, keep 2d angle of slope
        Move first point and border
        keep border parallel
        adjust slope
        and next shape
        """
        # offset side to point
        self.move_side(pt)
        # move verts on node side
        self.move_node(pt)
        if self.side == 'LEFT':
            # move verts on next side
            res, p, t = self.segs[1].intersect(self.segs[2])
        else:
            # move verts on next side
            res, p, t = self.segs[-1].intersect(self.segs[2])
        if res:
            self.move_next(p)
        if self.next is not None and self.next.auto_mode in {'AUTO'}:
            self.next.propagate_forward(p)
    def rotate_next_slope(self, a0):
        """
        Rotate next slope part
        """
        if self.side == 'LEFT':
            s0 = self.segs[1].rotate(a0)
            s1 = self.segs[2]
            res, p, t = s1.intersect(s0)
        else:
            s0 = self.segs[2]
            s1 = self.segs[-1]
            res, p, t = s1.oposite.rotate(-a0).intersect(s0)
        if res:
            s1.p0 = p
            s0.p1 = p
            if self.next is not None:
                if self.next.auto_mode == 'ALL':
                    return
                if self.next.backward:
                    self.next.propagate_backward(p)
                else:
                    self.next.propagate_forward(p)
    def rotate_node_slope(self, a0):
        """
        Rotate node slope part
        """
        if self.side == 'LEFT':
            s0 = self.segs[2]
            s1 = self.segs[-1]
            res, p, t = s1.oposite.rotate(-a0).intersect(s0)
        else:
            s0 = self.segs[1].rotate(a0)
            s1 = self.segs[2]
            res, p, t = s1.intersect(s0)
        if res:
            s1.p0 = p
            s0.p1 = p
            if self.next is not None:
                if self.next.auto_mode == 'ALL':
                    return
                if self.next.backward:
                    self.next.propagate_backward(p)
                else:
                    self.next.propagate_forward(p)
    def distance(self, pt):
        """
        distance from axis
        always use fake_axis here to
        allow axis being cut and
        still work
        """
        res, d, t = self.fake_axis.point_sur_segment(pt)
        return d, t
    def altitude(self, pt):
        """Relative z of pt: side distance times slope, negative downward."""
        d, t = self.distance(pt)
        return -d * self.slope
    def uv(self, pt):
        """Texture coordinates of pt: (distance along axis, side distance)."""
        d, t = self.distance(pt)
        return ((t - self.tmin) * self.xsize, d)
    def intersect(self, seg):
        """
        compute intersections of a segment with boundaries
        segment must start on axis
        return segments inside
        """
        it = []
        for s in self.segs:
            res, p, t, u = seg.intersect_ext(s)
            if res:
                it.append((t, p))
        return it
    def merge(self, other):
        raise NotImplementedError
    def draw(self, context, z, verts, edges):
        """Append this pan's boundary, holes and relationship arrows to
        verts / edges for preview drawing (z is the roof base altitude)."""
        f = len(verts)
        #
        # 0_______1
        # |_______|
        # 3       2
        verts.extend([(s.p0.x, s.p0.y, z + self.altitude(s.p0)) for s in self.segs])
        n_segs = len(self.segs) - 1
        edges.extend([[f + i, f + i + 1] for i in range(n_segs)])
        edges.append([f + n_segs, f])
        """
        f = len(verts)
        verts.extend([(s.p1.x, s.p1.y, z + self.altitude(s.p1)) for s in self.segs])
        n_segs = len(self.segs) - 1
        edges.extend([[f + i, f + i + 1] for i in range(n_segs)])
        edges.append([f + n_segs, f])
        """
        # holes
        for hole in self.holes:
            f = len(verts)
            #
            # 0_______1
            # |_______|
            # 3       2
            verts.extend([(s.p0.x, s.p0.y, z + self.altitude(s.p0)) for s in hole.segs])
            n_segs = len(hole.segs) - 1
            edges.extend([[f + i, f + i + 1] for i in range(n_segs)])
            edges.append([f + n_segs, f])
        # axis
        """
        f = len(verts)
        verts.extend([self.axis.p0.to_3d(), self.axis.p1.to_3d()])
        edges.append([f, f + 1])
        # cross
        f = len(verts)
        verts.extend([self.axis.lerp(0.5).to_3d(), (self.axis.lerp(0.5) + self.cross.v).to_3d()])
        edges.append([f, f + 1])
        """
        # relationships arrows
        if self.next or self.last:
            w = 0.2
            s0 = self._axis.offset(-0.5 * self.ysize)
            p0 = s0.lerp(0.4).to_3d()
            p0.z = z
            p1 = s0.lerp(0.6).to_3d()
            p1.z = z
            if self.side == 'RIGHT':
                p0, p1 = p1, p0
            if self.backward:
                p0, p1 = p1, p0
            s1 = s0.sized_normal(0.5, w)
            s2 = s0.sized_normal(0.5, -w)
            f = len(verts)
            p2 = s1.p1.to_3d()
            p2.z = z
            p3 = s2.p1.to_3d()
            p3.z = z
            verts.extend([p1, p0, p2, p3])
            edges.extend([[f + 1, f], [f + 2, f], [f + 3, f]])
    def as_string(self):
        """
        Print strips relationships
        """
        if self.backward:
            dir = "/\\"
            print("%s next" % (dir))
        else:
            dir = "\\/"
            print("%s node" % (dir))
        print("%s %s" % (dir, self.side))
        if self.backward:
            print("%s node" % (dir))
        else:
            print("%s next" % (dir))
        if self.next:
            print("_________")
            self.next.as_string()
        else:
            print("#########")
    def limits(self):
        """Compute axis-parametric extents (tmin/tmax/dt), pan sizes
        (xsize/ysize) and the local basis vectors vx/vy/vz used for
        mapping geometry onto the pan."""
        dist = []
        param_t = []
        for s in self.segs:
            res, d, t = self.fake_axis.point_sur_segment(s.p0)
            param_t.append(t)
            dist.append(d)
        if len(param_t) > 0:
            self.tmin = min(param_t)
            self.tmax = max(param_t)
        else:
            self.tmin = 0
            self.tmax = 1
        self.dt = self.tmax - self.tmin
        if len(dist) > 0:
            self.ysize = max(dist)
        else:
            self.ysize = 0
        self.xsize = self.fake_axis.length * self.dt
        # vectors components of part matrix
        # where x is is axis direction
        # y down
        # z up
        vx = -self.fake_axis.v.normalized().to_3d()
        vy = Vector((-vx.y, vx.x, self.slope)).normalized()
        self.vx = vx
        self.vy = vy
        self.vz = vx.cross(vy)
"""
import bpy
import bmesh
def print_list(name, lst, cols):
size = len(lst)
rows = 1 + int(size / cols)
print("%s" % "{} = [\n {}\n ]\n".format(name,
",\n ".join(
[", ".join([str(lst[r * cols + i]) for i in range(cols) if r * cols + i < size])
for r in range(rows)
])
))
def dump_mesh(m, cols, rounding):
verts = [(round(v.co.x, rounding), round(v.co.y, rounding), round(v.co.z, rounding)) for v in m.vertices]
faces = [tuple(p.vertices) for p in m.polygons]
bpy.ops.object.mode_set(mode='EDIT')
bm = bmesh.from_edit_mesh(m)
edges = [tuple(i.index for i in edge.verts) for edge in bm.edges]
uvs = []
layer = bm.loops.layers.uv.verify()
for i, face in enumerate(bm.faces):
uv = []
for j, loop in enumerate(face.loops):
co = loop[layer].uv
uv.append((round(co.x, rounding), round(co.y, rounding)))
uvs.append(uv)
matids = [p.material_index for p in m.polygons]
print_list("verts", verts, cols)
print_list("faces", faces, cols)
print_list("matids", matids, cols)
print_list("uvs", uvs, cols)
def dump_curve(m, cols, rounding):
verts = [(round(v.co.x, rounding), round(v.co.y, rounding), round(v.co.z, rounding)) for v in m.points]
print_list("verts", verts, cols)
cols = 3
rounding = 3
m = C.object.data
dump_mesh(m, cols, rounding)
for c in m.splines:
dump_curve(c, cols, rounding)
"""
class RoofGenerator(CutAbleGenerator):
def __init__(self, d, origin=Vector((0, 0, 0))):
self.d = d
self.parts = d.parts
self.segs = []
self.nodes = []
self.pans = []
self.length = 0
self.origin = origin.to_2d()
self.z = origin.z
self.width_right = d.width_right
self.width_left = d.width_left
self.slope_left = d.slope_left
self.slope_right = d.slope_right
self.user_defined_tile = None
self.user_defined_uvs = None
self.user_defined_mat = None
self.is_t_child = d.t_parent != ""
def add_part(self, part):
if len(self.segs) < 1 or part.bound_idx < 1:
s = None
else:
s = self.segs[part.bound_idx - 1]
a0 = part.a0
if part.constraint_type == 'SLOPE' and a0 == 0:
a0 = 90
# start a new roof
if s is None:
v = part.length * Vector((cos(a0), sin(a0)))
s = StraightRoof(self.origin, v)
else:
s = s.straight_roof(a0, part.length)
# parent segment (root) index is v0_idx - 1
s.v0_idx = min(len(self.segs), part.bound_idx)
s.constraint_type = part.constraint_type
if part.constraint_type == 'SLOPE':
s.enforce_part = part.enforce_part
else:
s.enforce_part = 'AUTO'
s.angle_0 = a0
s.take_precedence = part.take_precedence
s.auto_right = part.auto_right
s.auto_left = part.auto_left
s.width_left = part.width_left
s.width_right = part.width_right
s.slope_left = part.slope_left
s.slope_right = part.slope_right
s.type = 'AXIS'
s.triangular_end = part.triangular_end
self.segs.append(s)
    def locate_manipulators(self):
        """Place the on-screen manipulators of every part.

        For each axis segment, set up the angle (0), length (1) and
        dumb id (2) handles, the left / right width handles (3 / 4)
        and the left / right slope handles (5 / 6), all expressed at
        the roof base altitude self.z.
        """
        for i, f in enumerate(self.segs):
            manipulators = self.parts[i].manipulators
            p0 = f.p0.to_3d()
            p0.z = self.z
            p1 = f.p1.to_3d()
            p1.z = self.z
            # angle from last to current segment
            if i > 0:
                manipulators[0].type_key = 'ANGLE'
                # parent direction (reversed) vs this segment's direction
                v0 = self.segs[f.v0_idx - 1].straight(-1, 1).v.to_3d()
                v1 = f.straight(1, 0).v.to_3d()
                manipulators[0].set_pts([p0, v0, v1])
            # segment length
            manipulators[1].type_key = 'SIZE'
            manipulators[1].prop1_name = "length"
            manipulators[1].set_pts([p0, p1, (1.0, 0, 0)])
            # dumb segment id
            manipulators[2].set_pts([p0, p1, (1, 0, 0)])
            # width / slope handles sit at the segment midpoint
            p0 = f.lerp(0.5).to_3d()
            p0.z = self.z
            # size left
            p1 = f.sized_normal(0.5, -self.parts[i].width_left).p1.to_3d()
            p1.z = self.z
            manipulators[3].set_pts([p0, p1, (1, 0, 0)])
            # size right
            p1 = f.sized_normal(0.5, self.parts[i].width_right).p1.to_3d()
            p1.z = self.z
            manipulators[4].set_pts([p0, p1, (-1, 0, 0)])
            # slope left
            n0 = f.sized_normal(0.5, -1)
            p0 = n0.p1.to_3d()
            p0.z = self.z
            p1 = p0.copy()
            p1.z = self.z - self.parts[i].slope_left
            manipulators[5].set_pts([p0, p1, (-1, 0, 0)], normal=n0.v.to_3d())
            # slope right
            n0 = f.sized_normal(0.5, 1)
            p0 = n0.p1.to_3d()
            p0.z = self.z
            p1 = p0.copy()
            p1.z = self.z - self.parts[i].slope_right
            manipulators[6].set_pts([p0, p1, (1, 0, 0)], normal=n0.v.to_3d())
def seg_partition(self, array, begin, end):
"""
sort tree segments by angle
"""
pivot = begin
for i in range(begin + 1, end + 1):
if array[i].a0 < array[begin].a0:
pivot += 1
array[i], array[pivot] = array[pivot], array[i]
array[pivot], array[begin] = array[begin], array[pivot]
return pivot
def sort_seg(self, array, begin=0, end=None):
# print("sort_child")
if end is None:
end = len(array) - 1
def _quicksort(array, begin, end):
if begin >= end:
return
pivot = self.seg_partition(array, begin, end)
_quicksort(array, begin, pivot - 1)
_quicksort(array, pivot + 1, end)
return _quicksort(array, begin, end)
    def make_roof(self, context):
        """
        Init data structure for possibly multi branched nodes
        nodes : radial relationships
        pans : quad strip linear relationships

        Builds a RoofPolygon pair (left / right) per HORIZONTAL segment,
        propagates width / slope through each node, applies SLOPE
        constraints, adds triangular ends and finally merges contigous
        pans whose axis angle difference is null.
        """
        pans = []
        # node are connected segments
        # node
        # (segment idx)
        # (angle from root part > 0 right)
        # (reversed) a seg connected by p1
        # "root" of node
        nodes = [RoofAxisNode() for s in range(len(self.segs) + 1)]
        # Init width on seg 0
        s0 = self.segs[0]
        if self.parts[0].auto_left in {'AUTO', 'SLOPE'}:
            s0.width_left = self.width_left
        if self.parts[0].auto_right in {'AUTO', 'SLOPE'}:
            s0.width_right = self.width_right
        if self.parts[0].auto_left in {'AUTO', 'WIDTH'}:
            s0.slope_left = self.slope_left
        # NOTE(review): tests auto_left but assigns slope_right below;
        # presumably should test auto_right - confirm against upstream
        if self.parts[0].auto_left in {'AUTO', 'WIDTH'}:
            s0.slope_right = self.slope_right
        # make nodes with HORIZONTAL constraints
        for idx, s in enumerate(self.segs):
            s.v1_idx = idx + 1
            if s.constraint_type == 'HORIZONTAL':
                left = RoofPolygon(s, 'LEFT')
                right = RoofPolygon(s, 'RIGHT')
                left.other_side = right
                right.other_side = left
                rs = RoofSegment(s, left, right)
                pans.append(rs)
                nodes[s.v0_idx].add(s.angle_0, False, s, left, right)
                nodes[s.v1_idx].add(-pi, True, s, left, right)
        # set first node root
        # so regular sort does work
        nodes[0].root = nodes[0].segs[0]
        self.nodes = nodes
        # Propagate slope and width
        # on node basis along axis
        # bi-direction Radial around node
        # from left and right to center
        # contigous -> same
        # T: and (x % 2 == 1)
        # First one take precedence over others
        # others inherit from side
        #
        #         l / rb    l = left
        #          3        r = right
        #   l _1_ /         b = backward
        #   r     \
        #          2
        #          r\ l
        #
        # X: rigth one r left one l (x % 2 == 0)
        # inherits from side
        #
        #        l 3 lb         l = left
        # l __1___|___2__ l     r = right
        # r       |       r     b = backward -> propagate in reverse axis direction
        #        r 4 rb
        #
        # for idx, node in enumerate(nodes):
        #    print("idx:%s node:%s" % (idx, node.root))
        for idx, node in enumerate(nodes):
            node.sort()
            nb_segs = node.count
            if node.root is None:
                continue
            left = node.root.left
            right = node.root.right
            # basic one single node
            if nb_segs < 2:
                left.make_segments()
                right.make_segments()
                continue
            # get "root" slope and width
            l_bind = left
            r_bind = right
            # simple case: 2 contigous segments
            if nb_segs == 2:
                s = node.last
                s.right.bind(r_bind, ccw=False)
                s.left.bind(l_bind, ccw=True)
                continue
            # More than 2 segments, uneven distribution
            if nb_segs % 2 == 1:
                # find wich child does take precedence
                # first one on rootline (arbitrary)
                center = (nb_segs - 1) / 2
            else:
                # even distribution
                center = nb_segs / 2
            # user defined precedence if any
            for i, s in enumerate(node.segs):
                if s.seg.take_precedence:
                    center = i
                    break
            # bind right side to center
            for i, s in enumerate(node.segs):
                # skip axis
                if i > 0:
                    if i < center:
                        # right contigous with last
                        s.right.bind(r_bind, ccw=False)
                        # next bind to left
                        r_bind = s.left
                        # left backward, not bound
                        # so setup width and slope
                        if s.left.auto_mode in {'AUTO', 'WIDTH'}:
                            s.left.slope = right.slope
                        if s.left.auto_mode in {'AUTO', 'SLOPE'}:
                            s.left.width = right.width
                        s.left.backward = True
                    else:
                        # right bound to last
                        s.right.bind(r_bind, ccw=False)
                        break
            # bind left side to center
            for i, s in enumerate(reversed(node.segs)):
                # skip axis
                if i < nb_segs - center - 1:
                    # left contigous with last
                    s.left.bind(l_bind, ccw=True)
                    # next bind to right
                    l_bind = s.right
                    # right backward, not bound
                    # so setup width and slope
                    if s.right.auto_mode in {'AUTO', 'WIDTH'}:
                        s.right.slope = left.slope
                    if s.right.auto_mode in {'AUTO', 'SLOPE'}:
                        s.right.width = left.width
                    s.right.backward = True
                else:
                    # right bound to last
                    s.left.bind(l_bind, ccw=True)
                    break
        # slope constraints allowed between segments
        # multiple (up to 2) on start and end
        # single between others
        #
        #    2 slope            2 slope           2 slope
        #     |                  |                 |
        #     |______section_1___|___section_2_____|
        #     |                  |                 |
        #     |                  |                 |
        #    multiple           single            multiple
        # add slopes constraints to nodes
        for i, s in enumerate(self.segs):
            if s.constraint_type == 'SLOPE':
                nodes[s.v0_idx].add(s.angle_0, False, s, None, None)
        # sort nodes, remove duplicate slopes between
        # horizontal, keeping only first one
        for idx, node in enumerate(nodes):
            to_remove = []
            node.sort()
            # remove dup between all
            # but start / end nodes
            if node.n_horizontal > 1:
                last = None
                for i, s in enumerate(node.segs):
                    if s.seg.constraint_type == last:
                        if s.seg.constraint_type == 'SLOPE':
                            to_remove.append(i)
                    last = s.seg.constraint_type
                for i in reversed(to_remove):
                    node.segs.pop(i)
                node.update_center()
        for idx, node in enumerate(nodes):
            # a node may contain many slopes
            # 2 * (part starting from node - 1)
            #
            #        s0
            # root 0 |_______
            #        |
            #        s1
            #
            #               s1
            # root   _______|
            #               |
            #               s0
            #
            #       s3  3  s2
            #     l   \l|r/ l
            # root  ___\|/___ 2
            #     r    /|\  r
            #         /r|l\
            #       s0  1  s1
            #
            #       s2  s1=slope
            #        |r /
            #        | / l
            #        |/____s
            #
            # root to first child -> equal side
            # any other childs -> oposite sides
            if node.n_horizontal == 1:
                # slopes at start or end of segment
                # segment slope is not affected
                if node.n_slope > 0:
                    # node has user def slope
                    s = node.root
                    s0 = node.left(node.center)
                    a0 = s0.seg.delta_angle(s.seg)
                    if node.root.reversed:
                        # slope at end of segment
                        # first one is right or left
                        if a0 < 0:
                            # right side
                            res, p, t = s0.seg.intersect(s.right.segs[2])
                            s.right.segs[-1].p0 = p
                            s.right.segs[2].p1 = p
                        else:
                            # left side
                            res, p, t = s0.seg.intersect(s.left.segs[2])
                            s.left.segs[1].p1 = p
                            s.left.segs[2].p0 = p
                        if node.n_slope > 1:
                            # last one must be left
                            s1 = node.right(node.center)
                            a1 = s1.seg.delta_angle(s.seg)
                            # both slopes on same side:
                            # skip this one
                            if a0 > 0 and a1 < 0:
                                # right side
                                res, p, t = s1.seg.intersect(s.right.segs[2])
                                s.right.segs[-1].p0 = p
                                s.right.segs[2].p1 = p
                            if a0 < 0 and a1 > 0:
                                # left side
                                res, p, t = s1.seg.intersect(s.left.segs[2])
                                s.left.segs[1].p1 = p
                                s.left.segs[2].p0 = p
                    else:
                        # slope at start of segment
                        if a0 < 0:
                            # right side
                            res, p, t = s0.seg.intersect(s.right.segs[2])
                            s.right.segs[1].p1 = p
                            s.right.segs[2].p0 = p
                        else:
                            # left side
                            res, p, t = s0.seg.intersect(s.left.segs[2])
                            s.left.segs[-1].p0 = p
                            s.left.segs[2].p1 = p
                        if node.n_slope > 1:
                            # last one must be right
                            s1 = node.right(node.center)
                            a1 = s1.seg.delta_angle(s.seg)
                            # both slopes on same side:
                            # skip this one
                            if a0 > 0 and a1 < 0:
                                # right side
                                res, p, t = s1.seg.intersect(s.right.segs[2])
                                s.right.segs[1].p1 = p
                                s.right.segs[2].p0 = p
                            if a0 < 0 and a1 > 0:
                                # left side
                                res, p, t = s1.seg.intersect(s.left.segs[2])
                                s.left.segs[-1].p0 = p
                                s.left.segs[2].p1 = p
            else:
                # slopes between segments
                # does change next segment slope
                for i, s0 in enumerate(node.segs):
                    s1 = node.left(i)
                    s2 = node.left(i + 1)
                    if s1.seg.constraint_type == 'SLOPE':
                        # 3 cases:
                        # s0 is root contigous -> sides are same
                        # s2 is root contigous -> sides are same
                        # back to back -> sides are not same
                        if s0.reversed:
                            # contigous right / right
                            # 2 cases
                            # right is backward
                            # right is forward
                            if s2.right.backward:
                                # s0 depends on s2
                                main = s2.right
                                v = main.segs[1].v
                            else:
                                # s2 depends on s0
                                main = s0.right
                                v = -main.segs[-1].v
                            res, p, t = s1.seg.intersect(main.segs[2])
                            if res:
                                # slope vector
                                dp = p - s1.seg.p0
                                a0 = dp.angle_signed(v)
                                if s2.right.backward:
                                    main.rotate_node_slope(a0)
                                else:
                                    main.rotate_next_slope(-a0)
                        elif s2.reversed:
                            # contigous left / left
                            # 2 cases
                            # left is backward
                            # left is forward
                            if s0.left.backward:
                                # s0 depends on s2
                                main = s0.left
                                v = -main.segs[-1].v
                            else:
                                # s2 depends on s0
                                main = s2.left
                                v = main.segs[1].v
                            res, p, t = s1.seg.intersect(main.segs[2])
                            if res:
                                # slope vector
                                dp = p - s1.seg.p0
                                a0 = dp.angle_signed(v)
                                if s0.left.backward:
                                    main.rotate_node_slope(-a0)
                                else:
                                    main.rotate_next_slope(a0)
                        else:
                            # back left / right
                            # 2 cases
                            # left is backward
                            # left is forward
                            if s0.left.backward:
                                # s2 depends on s0
                                main = s0.left
                                v = -main.segs[-1].v
                            else:
                                # s0 depends on s2
                                main = s2.right
                                v = main.segs[1].v
                            res, p, t = s1.seg.intersect(main.segs[2])
                            if res:
                                # slope vector
                                dp = p - s1.seg.p0
                                a0 = dp.angle_signed(v)
                                if s0.left.backward:
                                    main.rotate_node_slope(-a0)
                                else:
                                    main.rotate_node_slope(a0)
        self.pans = []
        # triangular ends
        for node in self.nodes:
            if node.root is None:
                continue
            if node.n_horizontal == 1 and node.root.seg.triangular_end:
                if node.root.reversed:
                    # Next side (segment end)
                    left = node.root.left
                    right = node.root.right
                    left.next_tri = True
                    right.next_tri = True
                    s0 = left.segs[1]
                    s1 = left.segs[2]
                    s2 = right.segs[-1]
                    s3 = right.segs[2]
                    p0 = s1.lerp(-left.width / s1.length)
                    p1 = s0.p0
                    p2 = s3.lerp(1 + right.width / s3.length)
                    # compute slope from points
                    p3 = p0.to_3d()
                    p3.z = -left.width * left.slope
                    p4 = p1.to_3d()
                    p5 = p2.to_3d()
                    p5.z = -right.width * right.slope
                    n = (p3 - p4).normalized().cross((p5 - p4).normalized())
                    v = n.cross(Vector((0, 0, 1)))
                    dz = n.cross(v)
                    # compute axis
                    s = StraightRoof(p1, v)
                    res, d0, t = s.point_sur_segment(p0)
                    res, d1, t = s.point_sur_segment(p2)
                    p = RoofPolygon(s, 'RIGHT')
                    p.make_segments()
                    p.slope = -dz.z / dz.to_2d().length
                    p.is_tri = True
                    p.cross = StraightRoof(p1, (p2 - p0)).sized_normal(0, -1)
                    p.next_cross = left.cross
                    p.last_cross = right.cross
                    right.next_cross = p.cross
                    left.next_cross = p.cross
                    # remove axis seg of tri
                    p.segs[-1].p0 = p0
                    p.segs[-1].p1 = p1
                    p.segs[2].p0 = p2
                    p.segs[2].p1 = p0
                    p.segs[1].p1 = p2
                    p.segs[1].p0 = p1
                    p.segs[1].type = 'LINK_HIP'
                    p.segs[-1].type = 'LINK_HIP'
                    p.segs.pop(0)
                    # adjust left and side borders
                    s0.p1 = p0
                    s1.p0 = p0
                    s2.p0 = p2
                    s3.p1 = p2
                    s0.type = 'LINK_HIP'
                    s2.type = 'LINK_HIP'
                    self.pans.append(p)
                elif not self.is_t_child:
                    # no triangular part with t_child
                    # on "node" parent roof side
                    left = node.root.left
                    right = node.root.right
                    left.node_tri = True
                    right.node_tri = True
                    s0 = right.segs[1]
                    s1 = right.segs[2]
                    s2 = left.segs[-1]
                    s3 = left.segs[2]
                    p0 = s1.lerp(-right.width / s1.length)
                    p1 = s0.p0
                    p2 = s3.lerp(1 + left.width / s3.length)
                    # compute axis and slope from points
                    p3 = p0.to_3d()
                    p3.z = -right.width * right.slope
                    p4 = p1.to_3d()
                    p5 = p2.to_3d()
                    p5.z = -left.width * left.slope
                    n = (p3 - p4).normalized().cross((p5 - p4).normalized())
                    v = n.cross(Vector((0, 0, 1)))
                    dz = n.cross(v)
                    s = StraightRoof(p1, v)
                    p = RoofPolygon(s, 'RIGHT')
                    p.make_segments()
                    p.slope = -dz.z / dz.to_2d().length
                    p.is_tri = True
                    p.cross = StraightRoof(p1, (p2 - p0)).sized_normal(0, -1)
                    p.next_cross = right.cross
                    p.last_cross = left.cross
                    right.last_cross = p.cross
                    left.last_cross = p.cross
                    # remove axis seg of tri
                    p.segs[-1].p0 = p0
                    p.segs[-1].p1 = p1
                    p.segs[2].p0 = p2
                    p.segs[2].p1 = p0
                    p.segs[1].p1 = p2
                    p.segs[1].p0 = p1
                    p.segs[1].type = 'LINK_HIP'
                    p.segs[-1].type = 'LINK_HIP'
                    p.segs.pop(0)
                    # adjust left and side borders
                    s0.p1 = p0
                    s1.p0 = p0
                    s2.p0 = p2
                    s3.p1 = p2
                    s0.type = 'LINK_HIP'
                    s2.type = 'LINK_HIP'
                    self.pans.append(p)
        # make flat array
        for pan in pans:
            self.pans.extend([pan.left, pan.right])
        # merge contigous with 0 angle diff
        to_remove = []
        for i, pan in enumerate(self.pans):
            if pan.backward:
                next = pan.last
                if next is not None:
                    # same side only can merge
                    if next.side == pan.side:
                        if round(next._axis.delta_angle(pan._axis), 4) == 0:
                            to_remove.append(i)
                            next.next = pan.next
                            next.last_cross = pan.last_cross
                            next.node_tri = pan.node_tri
                            next.slope = pan.slope
                            if pan.side == 'RIGHT':
                                if next.backward:
                                    next._axis.p1 = pan._axis.p1
                                    next.segs[1] = pan.segs[1]
                                    next.segs[2].p0 = pan.segs[2].p0
                                else:
                                    next._axis.p0 = pan._axis.p0
                                    next.segs[-1] = pan.segs[-1]
                                    next.segs[2].p1 = pan.segs[2].p1
                            else:
                                if next.backward:
                                    next._axis.p0 = pan._axis.p0
                                    next.segs[-1] = pan.segs[-1]
                                    next.segs[2].p1 = pan.segs[2].p1
                                else:
                                    next._axis.p1 = pan._axis.p1
                                    next.segs[1] = pan.segs[1]
                                    next.segs[2].p0 = pan.segs[2].p0
            else:
                next = pan.next
                if next is not None:
                    # same side only can merge
                    if next.side == pan.side:
                        if round(next._axis.delta_angle(pan._axis), 4) == 0:
                            to_remove.append(i)
                            next.last = pan.last
                            next.last_cross = pan.last_cross
                            next.node_tri = pan.node_tri
                            next.slope = pan.slope
                            if pan.side == 'LEFT':
                                if next.backward:
                                    next._axis.p1 = pan._axis.p1
                                    next.segs[1] = pan.segs[1]
                                    next.segs[2].p0 = pan.segs[2].p0
                                else:
                                    next._axis.p0 = pan._axis.p0
                                    next.segs[-1] = pan.segs[-1]
                                    next.segs[2].p1 = pan.segs[2].p1
                            else:
                                if next.backward:
                                    next._axis.p0 = pan._axis.p0
                                    next.segs[-1] = pan.segs[-1]
                                    next.segs[2].p1 = pan.segs[2].p1
                                else:
                                    next._axis.p1 = pan._axis.p1
                                    next.segs[1] = pan.segs[1]
                                    next.segs[2].p0 = pan.segs[2].p0
        for i in reversed(to_remove):
            self.pans.pop(i)
        # compute limits
        for pan in self.pans:
            pan.limits()
        """
        for pan in self.pans:
            if pan.last is None:
                pan.as_string()
        """
        return
    def lambris(self, context, o, d):
        """Build the underside lining ("lambris") mesh for every pan.

        Creates one n-gon per pan just below the roof surface, cuts the
        pan holes, then solidifies it to lambris_height and joins the
        result into object *o*.
        """
        idmat = 0
        lambris_height = 0.02
        # build the face below the surface, then translate back up after solidify
        alt = self.z - lambris_height
        for pan in self.pans:
            verts = []
            faces = []
            matids = []
            uvs = []
            f = len(verts)
            verts.extend([(s.p0.x, s.p0.y, alt + pan.altitude(s.p0)) for s in pan.segs])
            uvs.append([pan.uv(s.p0) for s in pan.segs])
            n_segs = len(pan.segs)
            face = [f + i for i in range(n_segs)]
            faces.append(face)
            matids.append(idmat)
            bm = bmed.buildmesh(
                context, o, verts, faces, matids=matids, uvs=uvs,
                weld=False, clean=False, auto_smooth=True, temporary=True)
            self.cut_holes(bm, pan)
            # remove extra verts created by hole cuts on straight edges
            bmesh.ops.dissolve_limit(bm,
                angle_limit=0.01,
                use_dissolve_boundaries=False,
                verts=bm.verts,
                edges=bm.edges,
                delimit=1)
            geom = bm.faces[:]
            verts = bm.verts[:]
            bmesh.ops.solidify(bm, geom=geom, thickness=0.0001)
            bmesh.ops.translate(bm, vec=Vector((0, 0, lambris_height)), space=o.matrix_world, verts=verts)
            # merge with object
            bmed.bmesh_join(context, o, [bm], normal_update=True)
        bpy.ops.object.mode_set(mode='OBJECT')
    def couverture(self, context, o, d):
        """Build the tile covering of every pan.

        Picks a tile template mesh from d.tile_model, instances it over
        a grid aligned on each pan's local matrix, trims the result
        against the pan borders and holes, then optionally bevels and
        solidifies before joining into object *o*.
        """
        idmat = 7
        # tiles get a random material in [idmat, idmat + rand]
        rand = 3
        ttl = len(self.pans)
        if ttl < 1:
            return
        sx, sy, sz = d.tile_size_x, d.tile_size_y, d.tile_size_z
        """
        /* Bevel offset_type slot values */
        enum {
          BEVEL_AMT_OFFSET,
          BEVEL_AMT_WIDTH,
          BEVEL_AMT_DEPTH,
          BEVEL_AMT_PERCENT
        };
        """
        # 3 -> BEVEL_AMT_PERCENT (see enum above)
        offset_type = 3
        if d.tile_offset > 0:
            offset = - d.tile_offset / 100
        else:
            offset = 0
        # tile template: unit-space vertices and faces per model
        if d.tile_model == 'BRAAS2':
            t_pts = [Vector(p) for p in [
                (0.06, -1.0, 1.0), (0.19, -1.0, 0.5), (0.31, -1.0, 0.5), (0.44, -1.0, 1.0),
                (0.56, -1.0, 1.0), (0.69, -1.0, 0.5), (0.81, -1.0, 0.5), (0.94, -1.0, 1.0),
                (0.06, 0.0, 0.5), (0.19, 0.0, 0.0), (0.31, 0.0, 0.0), (0.44, 0.0, 0.5),
                (0.56, 0.0, 0.5), (0.69, 0.0, 0.0), (0.81, 0.0, 0.0), (0.94, 0.0, 0.5),
                (-0.0, -1.0, 1.0), (-0.0, 0.0, 0.5), (1.0, -1.0, 1.0), (1.0, 0.0, 0.5)]]
            t_faces = [
                (16, 0, 8, 17), (0, 1, 9, 8), (1, 2, 10, 9), (2, 3, 11, 10),
                (3, 4, 12, 11), (4, 5, 13, 12), (5, 6, 14, 13), (6, 7, 15, 14), (7, 18, 19, 15)]
        elif d.tile_model == 'BRAAS1':
            t_pts = [Vector(p) for p in [
                (0.1, -1.0, 1.0), (0.2, -1.0, 0.5), (0.6, -1.0, 0.5), (0.7, -1.0, 1.0),
                (0.1, 0.0, 0.5), (0.2, 0.0, 0.0), (0.6, 0.0, 0.0), (0.7, 0.0, 0.5),
                (-0.0, -1.0, 1.0), (-0.0, 0.0, 0.5), (1.0, -1.0, 1.0), (1.0, 0.0, 0.5)]]
            t_faces = [(8, 0, 4, 9), (0, 1, 5, 4), (1, 2, 6, 5), (2, 3, 7, 6), (3, 10, 11, 7)]
        elif d.tile_model == 'ETERNIT':
            t_pts = [Vector(p) for p in [
                (0.11, -1.0, 1.0), (0.9, -1.0, 1.0), (0.0, -0.79, 0.79),
                (1.0, -0.79, 0.79), (0.0, 2.0, -2.0), (1.0, 2.0, -2.0)]]
            t_faces = [(0, 1, 3, 5, 4, 2)]
        elif d.tile_model == 'ONDULEE':
            t_pts = [Vector(p) for p in [
                (0.0, -1.0, 0.1), (0.05, -1.0, 1.0), (0.1, -1.0, 0.1),
                (0.15, -1.0, 1.0), (0.2, -1.0, 0.1), (0.25, -1.0, 1.0),
                (0.3, -1.0, 0.1), (0.35, -1.0, 1.0), (0.4, -1.0, 0.1),
                (0.45, -1.0, 1.0), (0.5, -1.0, 0.1), (0.55, -1.0, 1.0),
                (0.6, -1.0, 0.1), (0.65, -1.0, 1.0), (0.7, -1.0, 0.1),
                (0.75, -1.0, 1.0), (0.8, -1.0, 0.1), (0.85, -1.0, 1.0),
                (0.9, -1.0, 0.1), (0.95, -1.0, 1.0), (1.0, -1.0, 0.1),
                (0.0, 0.0, 0.0), (0.05, 0.0, 0.9), (0.1, 0.0, 0.0),
                (0.15, 0.0, 0.9), (0.2, 0.0, 0.0), (0.25, 0.0, 0.9),
                (0.3, 0.0, 0.0), (0.35, 0.0, 0.9), (0.4, 0.0, 0.0),
                (0.45, 0.0, 0.9), (0.5, 0.0, 0.0), (0.55, 0.0, 0.9),
                (0.6, 0.0, 0.0), (0.65, 0.0, 0.9), (0.7, 0.0, 0.0),
                (0.75, 0.0, 0.9), (0.8, 0.0, 0.0), (0.85, 0.0, 0.9),
                (0.9, 0.0, 0.0), (0.95, 0.0, 0.9), (1.0, 0.0, 0.0)]]
            t_faces = [
                (0, 1, 22, 21), (1, 2, 23, 22), (2, 3, 24, 23),
                (3, 4, 25, 24), (4, 5, 26, 25), (5, 6, 27, 26),
                (6, 7, 28, 27), (7, 8, 29, 28), (8, 9, 30, 29),
                (9, 10, 31, 30), (10, 11, 32, 31), (11, 12, 33, 32),
                (12, 13, 34, 33), (13, 14, 35, 34), (14, 15, 36, 35),
                (15, 16, 37, 36), (16, 17, 38, 37), (17, 18, 39, 38),
                (18, 19, 40, 39), (19, 20, 41, 40)]
        elif d.tile_model == 'METAL':
            t_pts = [Vector(p) for p in [
                (0.0, -1.0, 0.0), (0.99, -1.0, 0.0), (1.0, -1.0, 0.0),
                (0.0, 0.0, 0.0), (0.99, 0.0, 0.0), (1.0, 0.0, 0.0),
                (0.99, -1.0, 1.0), (1.0, -1.0, 1.0), (1.0, 0.0, 1.0), (0.99, 0.0, 1.0)]]
            t_faces = [(0, 1, 4, 3), (7, 2, 5, 8), (1, 6, 9, 4), (6, 7, 8, 9)]
        elif d.tile_model == 'LAUZE':
            t_pts = [Vector(p) for p in [
                (0.75, -0.8, 0.8), (0.5, -1.0, 1.0), (0.25, -0.8, 0.8),
                (0.0, -0.5, 0.5), (1.0, -0.5, 0.5), (0.0, 0.5, -0.5), (1.0, 0.5, -0.5)]]
            t_faces = [(1, 0, 4, 6, 5, 3, 2)]
        elif d.tile_model == 'PLACEHOLDER':
            t_pts = [Vector(p) for p in [(0.0, -1.0, 1.0), (1.0, -1.0, 1.0), (0.0, 0.0, 0.0), (1.0, 0.0, 0.0)]]
            t_faces = [(0, 1, 3, 2)]
        elif d.tile_model == 'ROMAN':
            t_pts = [Vector(p) for p in [
                (0.18, 0.0, 0.3), (0.24, 0.0, 0.58), (0.76, 0.0, 0.58),
                (0.82, 0.0, 0.3), (0.05, -1.0, 0.5), (0.14, -1.0, 0.8),
                (0.86, -1.0, 0.8), (0.95, -1.0, 0.5), (0.45, 0.0, 0.5),
                (0.36, 0.0, 0.2), (-0.36, 0.0, 0.2), (-0.45, -0.0, 0.5),
                (0.32, -1.0, 0.7), (0.26, -1.0, 0.42), (-0.26, -1.0, 0.42),
                (-0.32, -1.0, 0.7), (0.5, 0.0, 0.74), (0.5, -1.0, 1.0),
                (-0.0, -1.0, 0.26), (-0.0, 0.0, 0.0)]
                ]
            t_faces = [
                (0, 4, 5, 1), (16, 17, 6, 2), (2, 6, 7, 3),
                (13, 12, 8, 9), (18, 13, 9, 19), (15, 14, 10, 11),
                (14, 18, 19, 10), (1, 5, 17, 16)
                ]
        elif d.tile_model == 'ROUND':
            t_pts = [Vector(p) for p in [
                (0.0, -0.5, 0.5), (1.0, -0.5, 0.5), (0.0, 0.0, 0.0),
                (1.0, 0.0, 0.0), (0.93, -0.71, 0.71), (0.78, -0.88, 0.88),
                (0.39, -0.97, 0.97), (0.61, -0.97, 0.97), (0.07, -0.71, 0.71),
                (0.22, -0.88, 0.88)]
                ]
            t_faces = [(6, 7, 5, 4, 1, 3, 2, 0, 8, 9)]
        else:
            return
        n_faces = len(t_faces)
        t_uvs = [[(t_pts[i].x, t_pts[i].y) for i in f] for f in t_faces]
        dx, dy = d.tile_space_x, d.tile_space_y
        # progress step per pan (percent)
        step = 100 / ttl
        if d.quick_edit:
            context.scene.archipack_progress_text = "Build tiles:"
        for i, pan in enumerate(self.pans):
            seg = pan.fake_axis
            # compute base matrix top left of face
            vx = pan.vx
            vy = pan.vy
            vz = pan.vz
            x0, y0 = seg.lerp(pan.tmax)
            z0 = self.z + d.tile_altitude
            ysize_2d = (d.tile_border + pan.ysize)
            space_x = pan.xsize + 2 * d.tile_side
            space_y = ysize_2d * sqrt(1 + pan.slope * pan.slope)
            n_x = 1 + int(space_x / dx)
            n_y = 1 + int(space_y / dy)
            if d.tile_fit_x:
                dx = space_x / n_x
            if d.tile_fit_y:
                dy = space_y / n_y
            if d.tile_alternate:
                n_y += 1
            tM = Matrix([
                [vx.x, vy.x, vz.x, x0],
                [vx.y, vy.y, vz.y, y0],
                [vx.z, vy.z, vz.z, z0],
                [0, 0, 0, 1]
            ])
            verts = []
            faces = []
            matids = []
            uvs = []
            # steps for this pan
            substep = step / n_y
            # print("step:%s sub:%s" % (step, substep))
            for k in range(n_y):
                progress = step * i + substep * k
                # print("progress %s" % (progress))
                if d.quick_edit:
                    context.scene.archipack_progress = progress
                y = k * dy
                x0 = offset * dx - d.tile_side
                nx = n_x
                if d.tile_alternate and k % 2 == 1:
                    x0 -= 0.5 * dx
                    nx += 1
                if d.tile_offset > 0:
                    nx += 1
                for j in range(nx):
                    x = x0 + j * dx
                    lM = tM * Matrix([
                        [sx, 0, 0, x],
                        [0, sy, 0, -y],
                        [0, 0, sz, 0],
                        [0, 0, 0, 1]
                    ])
                    v = len(verts)
                    verts.extend([lM * p for p in t_pts])
                    faces.extend([tuple(i + v for i in f) for f in t_faces])
                    mid = randint(idmat, idmat + rand)
                    t_mats = [mid for i in range(n_faces)]
                    matids.extend(t_mats)
                    uvs.extend(t_uvs)
            # build temp bmesh and bissect
            bm = bmed.buildmesh(
                context, o, verts, faces, matids=matids, uvs=uvs,
                weld=False, clean=False, auto_smooth=True, temporary=True)
            # clean outer on convex parts
            # pan.convex = False
            remove = pan.convex
            for s in pan.segs:
                # seg without length lead to invalid normal
                if s.length > 0:
                    if s.type == 'AXIS':
                        self.bissect(bm, s.p1.to_3d(), s.cross_z.to_3d(), clear_outer=remove)
                    elif s.type == 'BOTTOM':
                        s0 = s.offset(d.tile_border)
                        dz = pan.altitude(s0.p0)
                        vx = s0.v.to_3d()
                        vx.z = pan.altitude(s0.p1) - dz
                        vy = vz.cross(vx.normalized())
                        x, y = s0.p0
                        z = z0 + dz
                        self.bissect(bm, Vector((x, y, z)), -vy, clear_outer=remove)
                    elif s.type == 'SIDE':
                        p0 = s.p0 + s.cross_z.normalized() * d.tile_side
                        self.bissect(bm, p0.to_3d(), s.cross_z.to_3d(), clear_outer=remove)
                    elif s.type == 'LINK_VALLEY':
                        p0 = s.p0 - s.cross_z.normalized() * d.tile_couloir
                        self.bissect(bm, p0.to_3d(), s.cross_z.to_3d(), clear_outer=remove)
                    elif s.type in {'LINK_HIP', 'LINK'}:
                        self.bissect(bm, s.p0.to_3d(), s.cross_z.to_3d(), clear_outer=remove)
            # when not convex, select and remove outer parts
            if not pan.convex:
                """
                /* del "context" slot values, used for operator too */
                enum {
                    DEL_VERTS = 1,
                    DEL_EDGES,
                    DEL_ONLYFACES,
                    DEL_EDGESFACES,
                    DEL_FACES,
                    /* A version of 'DEL_FACES' that keeps edges on face boundaries,
                     * allowing the surrounding edge-loop to be kept from removed face regions. */
                    DEL_FACES_KEEP_BOUNDARY,
                    DEL_ONLYTAGGED
                };
                """
                # Build boundary including borders and bottom offsets
                # NOTE(review): the rebinding of 'offset' below clobbers the tile
                # offset computed before the pan loop; subsequent pans use the
                # clobbered value in x0 - confirm against upstream archipack
                new_s = None
                segs = []
                for s in pan.segs:
                    if s.length > 0:
                        if s.type == 'LINK_VALLEY':
                            offset = -d.tile_couloir
                        elif s.type == 'BOTTOM':
                            offset = d.tile_border
                        elif s.type == 'SIDE':
                            offset = d.tile_side
                        else:
                            offset = 0
                        new_s = s.make_offset(offset, new_s)
                        segs.append(new_s)
                if len(segs) > 0:
                    # last / first intersection
                    res, p, t = segs[0].intersect(segs[-1])
                    if res:
                        segs[0].p0 = p
                        segs[-1].p1 = p
                f_geom = [f for f in bm.faces if not pan.inside(f.calc_center_median().to_2d(), segs)]
                if len(f_geom) > 0:
                    # context=5 -> DEL_FACES (see enum above)
                    bmesh.ops.delete(bm, geom=f_geom, context=5)
            self.cut_holes(bm, pan)
            bmesh.ops.dissolve_limit(bm,
                angle_limit=0.01,
                use_dissolve_boundaries=False,
                verts=bm.verts[:],
                edges=bm.edges[:],
                delimit=1)
            if d.tile_bevel:
                geom = bm.verts[:]
                geom.extend(bm.edges[:])
                bmesh.ops.bevel(bm,
                    geom=geom,
                    offset=d.tile_bevel_amt,
                    offset_type=offset_type,
                    segments=d.tile_bevel_segs,
                    profile=0.5,
                    vertex_only=False,
                    clamp_overlap=True,
                    material=-1)
            if d.tile_solidify:
                geom = bm.faces[:]
                verts = bm.verts[:]
                bmesh.ops.solidify(bm, geom=geom, thickness=0.0001)
                bmesh.ops.translate(bm, vec=vz * d.tile_height, space=o.matrix_world, verts=verts)
            # merge with object
            bmed.bmesh_join(context, o, [bm], normal_update=True)
            bpy.ops.object.mode_set(mode='OBJECT')
        if d.quick_edit:
            context.scene.archipack_progress = -1
    def _bargeboard(self, s, i, boundary, pan,
            width, height, altitude, offset, idmat,
            verts, faces, edges, matids, uvs):
        """Emit one bargeboard box along SIDE segment *s* of *boundary*.

        Builds an extruded quad (top face, 4 sides, bottom face) of the
        given width / height, trimmed against the previous and next
        boundary segments, and appends geometry into the verts / faces /
        edges / matids / uvs buffers.
        """
        f = len(verts)
        # inner / outer offset lines of the board
        s0 = s.offset(offset - width)
        s1 = s.offset(offset)
        p0 = s0.p0
        p1 = s1.p0
        p2 = s0.p1
        p3 = s1.p1
        s2 = boundary.last_seg(i)
        s3 = boundary.next_seg(i)
        if s2.type == 'SIDE':
            # intersect last seg offset
            s4 = s2.offset(offset - width)
            s5 = s2.offset(offset)
            res, p, t = s4.intersect(s0)
            if res:
                p0 = p
            res, p, t = s5.intersect(s1)
            if res:
                p1 = p
        elif s2.type == 'AXIS' or 'LINK' in s2.type:
            # intersect axis or link seg
            res, p, t = s2.intersect(s0)
            if res:
                p0 = p
            res, p, t = s2.intersect(s1)
            if res:
                p1 = p
        if s3.type == 'SIDE':
            # intersect next seg offset
            s4 = s3.offset(offset - width)
            s5 = s3.offset(offset)
            res, p, t = s4.intersect(s0)
            if res:
                p2 = p
            res, p, t = s5.intersect(s1)
            if res:
                p3 = p
        elif s3.type == 'AXIS' or 'LINK' in s3.type:
            # intersect axis or link seg
            res, p, t = s3.intersect(s0)
            if res:
                p2 = p
            res, p, t = s3.intersect(s1)
            if res:
                p3 = p
        x0, y0 = p0
        x1, y1 = p1
        x2, y2 = p3
        x3, y3 = p2
        # top corners follow the pan surface altitude
        z0 = self.z + altitude + pan.altitude(p0)
        z1 = self.z + altitude + pan.altitude(p1)
        z2 = self.z + altitude + pan.altitude(p3)
        z3 = self.z + altitude + pan.altitude(p2)
        verts.extend([
            (x0, y0, z0),
            (x1, y1, z1),
            (x2, y2, z2),
            (x3, y3, z3),
        ])
        z0 -= height
        z1 -= height
        z2 -= height
        z3 -= height
        verts.extend([
            (x0, y0, z0),
            (x1, y1, z1),
            (x2, y2, z2),
            (x3, y3, z3),
        ])
        faces.extend([
            # top
            (f, f + 1, f + 2, f + 3),
            # sides
            (f, f + 4, f + 5, f + 1),
            (f + 1, f + 5, f + 6, f + 2),
            (f + 2, f + 6, f + 7, f + 3),
            (f + 3, f + 7, f + 4, f),
            # bottom
            (f + 4, f + 7, f + 6, f + 5)
        ])
        edges.append([f, f + 3])
        edges.append([f + 1, f + 2])
        edges.append([f + 4, f + 7])
        edges.append([f + 5, f + 6])
        matids.extend([idmat, idmat, idmat, idmat, idmat, idmat])
        uvs.extend([
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)]
        ])
def bargeboard(self, d, verts, faces, edges, matids, uvs):
#####################
# Vire-vents
#####################
idmat = 1
for pan in self.pans:
for hole in pan.holes:
for i, s in enumerate(hole.segs):
if s.type == 'SIDE':
self._bargeboard(s,
i,
hole, pan,
d.bargeboard_width,
d.bargeboard_height,
d.bargeboard_altitude,
d.bargeboard_offset,
idmat,
verts,
faces,
edges,
matids,
uvs)
for i, s in enumerate(pan.segs):
if s.type == 'SIDE':
self._bargeboard(s,
i,
pan, pan,
d.bargeboard_width,
d.bargeboard_height,
d.bargeboard_altitude,
d.bargeboard_offset,
idmat,
verts,
faces,
edges,
matids,
uvs)
    def _fascia(self, s, i, boundary, pan, tri_0, tri_1,
            width, height, altitude, offset, idmat,
            verts, faces, edges, matids, uvs):
        """Emit one fascia box along BOTTOM segment *s* of *boundary*.

        tri_0 / tri_1 tell whether the previous / next corner belongs to
        a triangular end, which changes how the end cut directions are
        derived. Geometry is appended into the verts / faces / edges /
        matids / uvs buffers.
        """
        f = len(verts)
        s0 = s.offset(offset)
        s1 = s.offset(offset + width)
        s2 = boundary.last_seg(i)
        s3 = boundary.next_seg(i)
        s4 = s2
        s5 = s3
        p0 = s0.p0
        p1 = s1.p0
        p2 = s0.p1
        p3 = s1.p1
        # find last neighboor depending on type
        if s2.type == 'AXIS' or 'LINK' in s2.type:
            # apply only on boundarys
            if not s.is_hole:
                # use last axis
                if pan.side == 'LEFT':
                    s6 = pan.next_cross
                else:
                    s6 = pan.last_cross
                if tri_0:
                    s2 = s.copy
                else:
                    s2 = s2.oposite
                # bissector of segment normal and cross direction
                s2.v = (s.sized_normal(0, 1).v + s6.v).normalized()
                s4 = s2
        elif s2.type == 'SIDE':
            s2 = s.copy
            s2.type = 'SIDE'
            s2.v = s.sized_normal(0, 1).v
            s4 = s2
        else:
            s2 = s2.offset(offset)
            s4 = s2.offset(offset + width)
        # find next neighboor depending on type
        if s3.type == 'AXIS' or 'LINK' in s3.type:
            if not s.is_hole:
                # use last axis
                if pan.side == 'LEFT':
                    s6 = pan.last_cross
                else:
                    s6 = pan.next_cross
                if tri_1:
                    s3 = s.oposite
                else:
                    s3 = s3.copy
                s3.v = (s.sized_normal(0, 1).v + s6.v).normalized()
                s5 = s3
        elif s3.type == 'SIDE':
            # when next is side, use perpendicular
            s3 = s.oposite
            s3.type = 'SIDE'
            s3.v = s.sized_normal(0, 1).v
            s5 = s3
        else:
            s3 = s3.offset(offset)
            s5 = s3.offset(offset + width)
        # units vectors and scale
        # is unit normal on sides
        # print("s.p:%s, s.v:%s s1.p::%s s1.v::%s" % (s.p, s.v, s1.p, s1.v))
        res, p, t = s0.intersect(s2)
        if res:
            p0 = p
        res, p, t = s0.intersect(s3)
        if res:
            p1 = p
        res, p, t = s1.intersect(s4)
        if res:
            p2 = p
        res, p, t = s1.intersect(s5)
        if res:
            p3 = p
        x0, y0 = p0
        x1, y1 = p2
        x2, y2 = p3
        x3, y3 = p1
        # top corners follow the pan surface altitude
        z0 = self.z + altitude + pan.altitude(p0)
        z1 = self.z + altitude + pan.altitude(p2)
        z2 = self.z + altitude + pan.altitude(p3)
        z3 = self.z + altitude + pan.altitude(p1)
        verts.extend([
            (x0, y0, z0),
            (x1, y1, z1),
            (x2, y2, z2),
            (x3, y3, z3),
        ])
        z0 -= height
        z1 -= height
        z2 -= height
        z3 -= height
        verts.extend([
            (x0, y0, z0),
            (x1, y1, z1),
            (x2, y2, z2),
            (x3, y3, z3),
        ])
        faces.extend([
            # top
            (f, f + 1, f + 2, f + 3),
            # sides
            (f, f + 4, f + 5, f + 1),
            (f + 1, f + 5, f + 6, f + 2),
            (f + 2, f + 6, f + 7, f + 3),
            (f + 3, f + 7, f + 4, f),
            # bottom
            (f + 4, f + 7, f + 6, f + 5)
        ])
        edges.append([f, f + 3])
        edges.append([f + 1, f + 2])
        edges.append([f + 4, f + 7])
        edges.append([f + 5, f + 6])
        matids.extend([idmat, idmat, idmat, idmat, idmat, idmat])
        uvs.extend([
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)],
            [(0, 0), (0, 1), (1, 1), (1, 0)]
        ])
    def fascia(self, d, verts, faces, edges, matids, uvs):
        """Emit fascias ("larmiers") on every BOTTOM segment.

        Walks each pan's hole boundaries then the pan outline, delegating
        box geometry to _fascia with the proper triangular-end flags.
        """
        idmat = 2
        for pan in self.pans:
            for hole in pan.holes:
                for i, s in enumerate(hole.segs):
                    if s.type == 'BOTTOM':
                        self._fascia(s,
                            i,
                            hole, pan,
                            False, False,
                            d.fascia_width,
                            d.fascia_height,
                            d.fascia_altitude,
                            d.fascia_offset,
                            idmat,
                            verts,
                            faces,
                            edges,
                            matids,
                            uvs)
            for i, s in enumerate(pan.segs):
                if s.type == 'BOTTOM':
                    tri_0 = pan.node_tri
                    tri_1 = pan.next_tri
                    # triangular ends apply on boundary only
                    # unless cut, boundary is parallel to axis
                    # except for triangular ends
                    if pan.side == 'LEFT':
                        tri_0, tri_1 = tri_1, tri_0
                    self._fascia(s,
                        i,
                        pan, pan,
                        tri_0, tri_1,
                        d.fascia_width,
                        d.fascia_height,
                        d.fascia_altitude,
                        d.fascia_offset,
                        idmat,
                        verts,
                        faces,
                        edges,
                        matids,
                        uvs)
                    continue
                    # NOTE(review): everything below the continue above is
                    # unreachable legacy code kept verbatim - confirm it can
                    # be removed upstream
                    f = len(verts)
                    s0 = s.offset(d.fascia_width)
                    s1 = pan.last_seg(i)
                    s2 = pan.next_seg(i)
                    # triangular ends apply on boundary only
                    # unless cut, boundary is parallel to axis
                    # except for triangular ends
                    tri_0 = (pan.node_tri and not s.is_hole) or pan.is_tri
                    tri_1 = (pan.next_tri and not s.is_hole) or pan.is_tri
                    if pan.side == 'LEFT':
                        tri_0, tri_1 = tri_1, tri_0
                    # tiangular use bottom segment direction
                    # find last neighboor depending on type
                    if s1.type == 'AXIS' or 'LINK' in s1.type:
                        # apply only on boundarys
                        if not s.is_hole:
                            # use last axis
                            if pan.side == 'LEFT':
                                s3 = pan.next_cross
                            else:
                                s3 = pan.last_cross
                            if tri_0:
                                s1 = s.copy
                            else:
                                s1 = s1.oposite
                            s1.v = (s.sized_normal(0, 1).v + s3.v).normalized()
                    elif s1.type == 'SIDE':
                        s1 = s.copy
                        s1.type = 'SIDE'
                        s1.v = s.sized_normal(0, 1).v
                    else:
                        s1 = s1.offset(d.fascia_width)
                    # find next neighboor depending on type
                    if s2.type == 'AXIS' or 'LINK' in s2.type:
                        if not s.is_hole:
                            # use last axis
                            if pan.side == 'LEFT':
                                s3 = pan.last_cross
                            else:
                                s3 = pan.next_cross
                            if tri_1:
                                s2 = s.oposite
                            else:
                                s2 = s2.copy
                            s2.v = (s.sized_normal(0, 1).v + s3.v).normalized()
                    elif s2.type == 'SIDE':
                        s2 = s.oposite
                        s2.type = 'SIDE'
                        s2.v = s.sized_normal(0, 1).v
                    else:
                        s2 = s2.offset(d.fascia_width)
                    # units vectors and scale
                    # is unit normal on sides
                    # print("s.p:%s, s.v:%s s1.p::%s s1.v::%s" % (s.p, s.v, s1.p, s1.v))
                    res, p0, t = s0.intersect(s1)
                    res, p1, t = s0.intersect(s2)
                    x0, y0 = s.p0
                    x1, y1 = p0
                    x2, y2 = p1
                    x3, y3 = s.p1
                    z0 = self.z + d.fascia_altitude + pan.altitude(s.p0)
                    z1 = self.z + d.fascia_altitude + pan.altitude(s.p1)
                    verts.extend([
                        (x0, y0, z0),
                        (x1, y1, z0),
                        (x2, y2, z1),
                        (x3, y3, z1),
                    ])
                    z0 -= d.fascia_height
                    z1 -= d.fascia_height
                    verts.extend([
                        (x0, y0, z0),
                        (x1, y1, z0),
                        (x2, y2, z1),
                        (x3, y3, z1),
                    ])
                    faces.extend([
                        # top
                        (f, f + 1, f + 2, f + 3),
                        # sides
                        (f, f + 4, f + 5, f + 1),
                        (f + 1, f + 5, f + 6, f + 2),
                        (f + 2, f + 6, f + 7, f + 3),
                        (f + 3, f + 7, f + 4, f),
                        # bottom
                        (f + 4, f + 7, f + 6, f + 5)
                    ])
                    edges.append([f, f + 3])
                    edges.append([f + 1, f + 2])
                    edges.append([f + 4, f + 7])
                    edges.append([f + 5, f + 6])
                    matids.extend([idmat, idmat, idmat, idmat, idmat, idmat])
                    uvs.extend([
                        [(0, 0), (0, 1), (1, 1), (1, 0)],
                        [(0, 0), (0, 1), (1, 1), (1, 0)],
                        [(0, 0), (0, 1), (1, 1), (1, 0)],
                        [(0, 0), (0, 1), (1, 1), (1, 0)],
                        [(0, 0), (0, 1), (1, 1), (1, 0)],
                        [(0, 0), (0, 1), (1, 1), (1, 0)]
                    ])
def gutter(self, d, verts, faces, edges, matids, uvs):
    """Generate gutter ("chenaux") geometry along each pan's 'BOTTOM' segments.

    Appends a half-pipe gutter profile plus a rolled bead ("boudin") edge,
    swept between the two ends of every bottom segment, into the flat
    verts/faces/matids/uvs lists. End caps are added when the neighbour
    segment is of type 'SIDE'.

    d: roof datablock holding gutter_* and fascia_* settings.
    verts/faces/edges/matids/uvs: output mesh buffers, extended in place.
    """
    #####################
    # Gutters (chenaux)
    #####################
    idmat = 5
    # caps at start and end
    if d.gutter_segs % 2 == 1:
        n_faces = int((d.gutter_segs - 1) / 2)
    else:
        n_faces = int((d.gutter_segs / 2) - 1)
    df = 2 * d.gutter_segs + 1
    for pan in self.pans:
        for i, s in enumerate(pan.segs):
            if s.type == 'BOTTOM':
                f = len(verts)
                s0 = s.offset(d.gutter_dist + d.gutter_width)
                s1 = pan.last_seg(i)
                s2 = pan.next_seg(i)
                p0 = s0.p0
                p1 = s0.p1
                tri_0 = pan.node_tri or pan.is_tri
                tri_1 = pan.next_tri or pan.is_tri
                if pan.side == 'LEFT':
                    tri_0, tri_1 = tri_1, tri_0
                f = len(verts)
                # triangular: use segment direction
                # find last neighbour depending on type
                if s1.type == 'AXIS' or 'LINK' in s1.type:
                    # apply only on boundaries
                    if not s.is_hole:
                        # use last axis
                        if pan.side == 'LEFT':
                            s3 = pan.next_cross
                        else:
                            s3 = pan.last_cross
                        if tri_0:
                            s1 = s.copy
                        else:
                            s1 = s1.oposite
                        s1.v = (s.sized_normal(0, 1).v + s3.v).normalized()
                elif s1.type == 'SIDE':
                    s1 = s.copy
                    s1.type = 'SIDE'
                    s1.v = s.sized_normal(0, 1).v
                else:
                    s1 = s1.offset(d.gutter_dist + d.gutter_width)
                # find next neighbour depending on type
                if s2.type == 'AXIS' or 'LINK' in s2.type:
                    if not s.is_hole:
                        # use last axis
                        if pan.side == 'LEFT':
                            s3 = pan.last_cross
                        else:
                            s3 = pan.next_cross
                        if tri_1:
                            s2 = s.oposite
                        else:
                            s2 = s2.copy
                        s2.v = (s.sized_normal(0, 1).v + s3.v).normalized()
                elif s2.type == 'SIDE':
                    s2 = s.oposite
                    s2.type = 'SIDE'
                    s2.v = s.sized_normal(0, 1).v
                else:
                    s2 = s2.offset(d.gutter_dist + d.gutter_width)
                # units vectors and scale
                # is unit normal on sides
                # print("s.p:%s, s.v:%s s1.p::%s s1.v::%s" % (s.p, s.v, s1.p, s1.v))
                res, p, t = s0.intersect(s1)
                if res:
                    p0 = p
                res, p, t = s0.intersect(s2)
                if res:
                    p1 = p
                """
                f = len(verts)
                verts.extend([s1.p0.to_3d(), s1.p1.to_3d()])
                edges.append([f, f + 1])
                f = len(verts)
                verts.extend([s2.p0.to_3d(), s2.p1.to_3d()])
                edges.append([f, f + 1])
                continue
                """
                v0 = p0 - s.p0
                v1 = p1 - s.p1
                # mitre scale factors so the profile stays continuous at corners
                scale_0 = v0.length / (d.gutter_dist + d.gutter_width)
                scale_1 = v1.length / (d.gutter_dist + d.gutter_width)
                s3 = Line(s.p0, v0.normalized())
                s4 = Line(s.p1, v1.normalized())
                zt = self.z + d.fascia_altitude + pan.altitude(s3.p0)
                z0 = self.z + d.gutter_alt + pan.altitude(s3.p0)
                z1 = z0 - 0.5 * d.gutter_width
                z2 = z1 - 0.5 * d.gutter_width
                z3 = z1 - 0.5 * d.gutter_boudin
                dz0 = z2 - z1
                dz1 = z3 - z1
                tt = scale_0 * d.fascia_width
                t0 = scale_0 * d.gutter_dist
                t1 = t0 + scale_0 * (0.5 * d.gutter_width)
                t2 = t1 + scale_0 * (0.5 * d.gutter_width)
                t3 = t2 + scale_0 * (0.5 * d.gutter_boudin)
                # fascia board edge
                xt, yt = s3.lerp(tt)
                # gutter outer edge
                x0, y0 = s3.lerp(t0)
                # gutter axis
                x1, y1 = s3.lerp(t1)
                # inner bead edge
                x2, y2 = s3.lerp(t2)
                # bead axis
                x3, y3 = s3.lerp(t3)
                dx = x0 - x1
                dy = y0 - y1
                verts.append((xt, yt, zt))
                # gutter half-pipe (start end)
                da = pi / d.gutter_segs
                for i in range(d.gutter_segs):
                    sa = sin(i * da)
                    ca = cos(i * da)
                    verts.append((x1 + dx * ca, y1 + dy * ca, z1 + dz0 * sa))
                dx = x2 - x3
                dy = y2 - y3
                # rolled bead (boudin)
                da = -pi / (0.75 * d.gutter_segs)
                for i in range(d.gutter_segs):
                    sa = sin(i * da)
                    ca = cos(i * da)
                    verts.append((x3 + dx * ca, y3 + dy * ca, z1 + dz1 * sa))
                # same profile again at the far end of the segment
                zt = self.z + d.fascia_altitude + pan.altitude(s4.p0)
                z0 = self.z + d.gutter_alt + pan.altitude(s4.p0)
                z1 = z0 - 0.5 * d.gutter_width
                z2 = z1 - 0.5 * d.gutter_width
                z3 = z1 - 0.5 * d.gutter_boudin
                dz0 = z2 - z1
                dz1 = z3 - z1
                tt = scale_1 * d.fascia_width
                t0 = scale_1 * d.gutter_dist
                t1 = t0 + scale_1 * (0.5 * d.gutter_width)
                t2 = t1 + scale_1 * (0.5 * d.gutter_width)
                t3 = t2 + scale_1 * (0.5 * d.gutter_boudin)
                # fascia board edge
                xt, yt = s4.lerp(tt)
                # gutter outer edge
                x0, y0 = s4.lerp(t0)
                # gutter axis
                x1, y1 = s4.lerp(t1)
                # inner bead edge
                x2, y2 = s4.lerp(t2)
                # bead axis
                x3, y3 = s4.lerp(t3)
                dx = x0 - x1
                dy = y0 - y1
                # board
                verts.append((xt, yt, zt))
                faces.append((f + df, f, f + 1, f + df + 1))
                uvs.append([(0, 0), (1, 0), (1, 1), (0, 1)])
                matids.append(idmat)
                # gutter half-pipe (far end)
                da = pi / d.gutter_segs
                for i in range(d.gutter_segs):
                    sa = sin(i * da)
                    ca = cos(i * da)
                    verts.append((x1 + dx * ca, y1 + dy * ca, z1 + dz0 * sa))
                dx = x2 - x3
                dy = y2 - y3
                # rolled bead (boudin)
                da = -pi / (0.75 * d.gutter_segs)
                for i in range(d.gutter_segs):
                    sa = sin(i * da)
                    ca = cos(i * da)
                    verts.append((x3 + dx * ca, y3 + dy * ca, z1 + dz1 * sa))
                # bridge the two profile rings with quads
                df = 2 * d.gutter_segs + 1
                for i in range(1, 2 * d.gutter_segs):
                    j = i + f
                    faces.append((j, j + df, j + df + 1, j + 1))
                    uvs.append([(0, 0), (1, 0), (1, 1), (0, 1)])
                    matids.append(idmat)
                """
                segs = 6
                n_faces = segs / 2 - 1
                0           6
                 1         5
                   2     4
                      3
                """
                # close start
                if s1.type == 'SIDE':
                    if d.gutter_segs % 2 == 0:
                        faces.append((f + n_faces + 3, f + n_faces + 1, f + n_faces + 2))
                        uvs.append([(0, 0), (1, 0), (0.5, -0.5)])
                        matids.append(idmat)
                    for i in range(n_faces):
                        j = i + f + 1
                        k = f + d.gutter_segs - i
                        faces.append((j + 1, k, k + 1, j))
                        uvs.append([(0, 0), (1, 0), (1, 1), (0, 1)])
                        matids.append(idmat)
                # close end
                if s2.type == 'SIDE':
                    f += 2 * d.gutter_segs + 1
                    if d.gutter_segs % 2 == 0:
                        faces.append((f + n_faces + 1, f + n_faces + 3, f + n_faces + 2))
                        uvs.append([(0, 0), (1, 0), (0.5, -0.5)])
                        matids.append(idmat)
                    for i in range(n_faces):
                        j = i + f + 1
                        k = f + d.gutter_segs - i
                        faces.append((j, k + 1, k, j + 1))
                        uvs.append([(0, 0), (1, 0), (1, 1), (0, 1)])
                        matids.append(idmat)
def beam_primary(self, d, verts, faces, edges, matids, uvs):
    """Generate the primary ridge beam ("poutre faitiere") under 'AXIS' segments.

    For each pan segment of type 'AXIS', appends a rectangular box of
    width d.beam_width and height d.beam_height into the output buffers,
    trimmed against neighbouring segments and extended by d.beam_offset
    past 'SIDE' ends. The internal face is intentionally omitted (the
    mirrored pan supplies it).
    """
    idmat = 3
    for pan in self.pans:
        for i, s in enumerate(pan.segs):
            if s.type == 'AXIS':
                ####################
                # Ridge beam (poutre faitiere)
                ####################
                """
                 1___________________2   left
                0|___________________|3  axis
                 |___________________|   right
                 5                   4
                """
                f = len(verts)
                s2 = s.offset(-0.5 * d.beam_width)
                # offset from roof border
                s0 = pan.last_seg(i)
                s1 = pan.next_seg(i)
                t0 = 0
                t1 = 1
                s0_tri = pan.next_tri
                s1_tri = pan.node_tri
                if pan.side == 'LEFT':
                    s0_tri, s1_tri = s1_tri, s0_tri
                if s0.type == 'SIDE' and s.length > 0:
                    s0 = s0.offset(d.beam_offset)
                    t0 = -d.beam_offset / s.length
                if s0_tri:
                    # triangular end: beam starts at the offset axis itself
                    p0 = s2.p0
                    t0 = 0
                else:
                    res, p0, t = s2.intersect(s0)
                    if not res:
                        continue
                if s1.type == 'SIDE' and s.length > 0:
                    s1 = s1.offset(d.beam_offset)
                    t1 = 1 + d.beam_offset / s.length
                if s1_tri:
                    t1 = 1
                    p1 = s2.p1
                else:
                    res, p1, t = s2.intersect(s1)
                    if not res:
                        continue
                x0, y0 = p0
                x1, y1 = s.lerp(t0)
                x2, y2 = p1
                x3, y3 = s.lerp(t1)
                z0 = self.z + d.beam_alt + pan.altitude(p0)
                z1 = z0 - d.beam_height
                z2 = self.z + d.beam_alt + pan.altitude(p1)
                z3 = z2 - d.beam_height
                verts.extend([
                    (x0, y0, z0),
                    (x1, y1, z0),
                    (x2, y2, z2),
                    (x3, y3, z2),
                    (x0, y0, z1),
                    (x1, y1, z1),
                    (x2, y2, z3),
                    (x3, y3, z3),
                ])
                # cap the beam ends only where they are exposed
                if s0_tri or s0.type == 'SIDE':
                    faces.append((f + 4, f + 5, f + 1, f))
                    uvs.append([(0, 0), (1, 0), (1, 1), (0, 1)])
                    matids.append(idmat)
                if s1_tri or s1.type == 'SIDE':
                    faces.append((f + 2, f + 3, f + 7, f + 6))
                    uvs.append([(0, 0), (1, 0), (1, 1), (0, 1)])
                    matids.append(idmat)
                faces.extend([
                    # internal side
                    # (f + 1, f + 5, f + 7, f + 3),
                    # external side
                    (f + 2, f + 6, f + 4, f),
                    # top
                    (f, f + 1, f + 3, f + 2),
                    # bottom
                    (f + 5, f + 4, f + 6, f + 7)
                ])
                matids.extend([
                    idmat, idmat, idmat
                ])
                uvs.extend([
                    [(0, 0), (0, 1), (1, 1), (1, 0)],
                    [(0, 0), (0, 1), (1, 1), (1, 0)],
                    [(0, 0), (0, 1), (1, 1), (1, 0)]
                ])
def rafter(self, context, o, d):
    """Build rafters ("chevrons") as real mesh data on object o.

    For each pan, lays out evenly spaced rafter strips along the pan's
    fake axis, builds a temporary bmesh, cuts it by the pan boundary and
    holes, then solidifies to d.rafter_height and joins the result into o.

    Side effects: edits o's mesh via bmesh and leaves the object in
    OBJECT mode.
    """
    idmat = 4
    # Rafters / Chevrons
    # keep first rafter clear of the border
    start = max(0.001 + 0.5 * d.rafter_width, d.rafter_start)
    holes_offset = -d.rafter_width
    # build temp bmesh and bissect
    for pan in self.pans:
        tmin, tmax, ysize = pan.tmin, pan.tmax, pan.ysize
        # print("tmin:%s tmax:%s ysize:%s" % (tmin, tmax, ysize))
        f = 0
        verts = []
        faces = []
        matids = []
        uvs = []
        alt = d.rafter_alt
        seg = pan.fake_axis
        # t params of the first rafter's two edges along the axis
        t0 = tmin + (start - 0.5 * d.rafter_width) / seg.length
        t1 = tmin + (start + 0.5 * d.rafter_width) / seg.length
        tx = start / seg.length
        dt = d.rafter_spacing / seg.length
        n_items = max(1, round((tmax - tmin) / dt, 0))
        dt = ((tmax - tmin) - 2 * tx) / n_items
        for j in range(int(n_items) + 1):
            n0 = seg.sized_normal(t1 + j * dt, - ysize)
            n1 = seg.sized_normal(t0 + j * dt, - ysize)
            f = len(verts)
            z0 = self.z + alt + pan.altitude(n0.p0)
            x0, y0 = n0.p0
            z1 = self.z + alt + pan.altitude(n0.p1)
            x1, y1 = n0.p1
            z2 = self.z + alt + pan.altitude(n1.p0)
            x2, y2 = n1.p0
            z3 = self.z + alt + pan.altitude(n1.p1)
            x3, y3 = n1.p1
            verts.extend([
                (x0, y0, z0),
                (x1, y1, z1),
                (x2, y2, z2),
                (x3, y3, z3)
            ])
            faces.append((f + 1, f, f + 2, f + 3))
            matids.append(idmat)
            uvs.append([(0, 0), (1, 0), (1, 1), (0, 1)])
        bm = bmed.buildmesh(
            context, o, verts, faces, matids=matids, uvs=uvs,
            weld=False, clean=False, auto_smooth=True, temporary=True)
        self.cut_boundary(bm, pan)
        self.cut_holes(bm, pan, offset={'DEFAULT': holes_offset})
        bmesh.ops.dissolve_limit(bm,
            angle_limit=0.01,
            use_dissolve_boundaries=False,
            verts=bm.verts,
            edges=bm.edges,
            delimit=1)
        geom = bm.faces[:]
        verts = bm.verts[:]
        # give the flat strips thickness, then drop them to rafter depth
        bmesh.ops.solidify(bm, geom=geom, thickness=0.0001)
        bmesh.ops.translate(bm, vec=Vector((0, 0, -d.rafter_height)), space=o.matrix_world, verts=verts)
        # uvs for sides
        uvs = [(0, 0), (1, 0), (1, 1), (0, 1)]
        layer = bm.loops.layers.uv.verify()
        for i, face in enumerate(bm.faces):
            if len(face.loops) == 4:
                for j, loop in enumerate(face.loops):
                    loop[layer].uv = uvs[j]
        # merge with object
        bmed.bmesh_join(context, o, [bm], normal_update=True)
    bpy.ops.object.mode_set(mode='OBJECT')
def hips(self, d, verts, faces, edges, matids, uvs):
    """Generate hip covers, valleys and secondary beams along link segments.

    Uses a template mesh (t_pts / t_faces / t_uvs) selected by
    d.hip_model ('ROUND', 'ETERNIT' or 'FLAT') and instances it:
      - along 'LINK_HIP' segments (sloped hips),
      - along 'AXIS' segments of left pans (ridge caps),
    adds a flat quad for 'LINK_VALLEY' segments, and a rectangular
    secondary beam under any 'LINK*' segment when d.beam_sec_enable.
    t_left / t_right index template vertices whose z is adjusted to
    follow the roof slopes.
    """
    idmat_valley = 5
    idmat = 6
    idmat_poutre = 4
    sx, sy, sz = d.hip_size_x, d.hip_size_y, d.hip_size_z
    if d.hip_model == 'ROUND':
        # round hips
        t_pts = [Vector((sx * x, sy * y, sz * z)) for x, y, z in [
            (-0.5, 0.34, 0.08), (-0.5, 0.32, 0.19), (0.5, -0.4, -0.5),
            (0.5, 0.4, -0.5), (-0.5, 0.26, 0.28), (-0.5, 0.16, 0.34),
            (-0.5, 0.05, 0.37), (-0.5, -0.05, 0.37), (-0.5, -0.16, 0.34),
            (-0.5, -0.26, 0.28), (-0.5, -0.32, 0.19), (-0.5, -0.34, 0.08),
            (-0.5, -0.25, -0.5), (-0.5, 0.25, -0.5), (0.5, -0.08, 0.5),
            (0.5, -0.5, 0.08), (0.5, -0.24, 0.47), (0.5, -0.38, 0.38),
            (0.5, -0.47, 0.24), (0.5, 0.5, 0.08), (0.5, 0.08, 0.5),
            (0.5, 0.47, 0.24), (0.5, 0.38, 0.38), (0.5, 0.24, 0.47)
        ]]
        t_faces = [
            (23, 22, 4, 5), (3, 19, 21, 22, 23, 20, 14, 16, 17, 18, 15, 2), (14, 20, 6, 7),
            (18, 17, 9, 10), (15, 18, 10, 11), (21, 19, 0, 1), (17, 16, 8, 9),
            (13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 1, 0), (19, 3, 13, 0), (20, 23, 5, 6), (22, 21, 1, 4),
            (3, 2, 12, 13), (2, 15, 11, 12), (16, 14, 7, 8)
        ]
        t_uvs = [
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.5, 1.0), (0.75, 0.93), (0.93, 0.75),
            (1.0, 0.5), (0.93, 0.25), (0.75, 0.07),
            (0.5, 0.0), (0.25, 0.07), (0.07, 0.25),
            (0.0, 0.5), (0.07, 0.75), (0.25, 0.93)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.5, 1.0), (0.75, 0.93), (0.93, 0.75),
            (1.0, 0.5), (0.93, 0.25), (0.75, 0.07),
            (0.5, 0.0), (0.25, 0.07), (0.07, 0.25),
            (0.0, 0.5), (0.07, 0.75), (0.25, 0.93)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
        ]
        # affect vertex with slope
        t_left = []
        t_right = []
    elif d.hip_model == 'ETERNIT':
        # square hips "eternit like"
        t_pts = [Vector((sx * x, sy * y, sz * z)) for x, y, z in [
            (0.5, 0.5, 0.0), (-0.5, 0.5, -0.5), (0.5, -0.5, 0.0),
            (-0.5, -0.5, -0.5), (0.5, 0.0, 0.0), (-0.5, -0.0, -0.5),
            (0.5, 0.0, 0.5), (0.5, -0.5, 0.5), (-0.5, -0.5, 0.0),
            (-0.5, -0.0, 0.0), (0.5, 0.5, 0.5), (-0.5, 0.5, 0.0)]
        ]
        t_faces = [
            (4, 2, 3, 5), (0, 4, 5, 1), (6, 9, 8, 7),
            (10, 11, 9, 6), (0, 10, 6, 4), (5, 9, 11, 1),
            (2, 7, 8, 3), (1, 11, 10, 0), (4, 6, 7, 2), (3, 8, 9, 5)
        ]
        t_uvs = [
            [(0.0, 0.5), (0.0, 1.0), (1.0, 1.0), (1.0, 0.5)], [(0.0, 0.0), (0.0, 0.5), (1.0, 0.5), (1.0, 0.0)],
            [(0.0, 0.5), (1.0, 0.5), (1.0, 1.0), (0.0, 1.0)], [(0.0, 0.0), (1.0, 0.0), (1.0, 0.5), (0.0, 0.5)],
            [(0.0, 0.5), (0.0, 1.0), (0.5, 1.0), (0.5, 0.5)], [(0.5, 0.5), (0.5, 1.0), (0.0, 1.0), (0.0, 0.5)],
            [(0.0, 0.5), (0.0, 1.0), (1.0, 1.0), (1.0, 0.5)], [(0.0, 0.5), (0.0, 1.0), (-1.0, 1.0), (-1.0, 0.5)],
            [(0.5, 0.5), (0.5, 1.0), (1.0, 1.0), (1.0, 0.5)], [(0.0, 0.5), (0.0, 1.0), (-0.5, 1.0), (-0.5, 0.5)]
        ]
        t_left = [2, 3, 7, 8]
        t_right = [0, 1, 10, 11]
    elif d.hip_model == 'FLAT':
        # square hips "eternit like"
        t_pts = [Vector((sx * x, sy * y, sz * z)) for x, y, z in [
            (-0.5, -0.4, 0.0), (-0.5, -0.4, 0.5), (-0.5, 0.4, 0.0),
            (-0.5, 0.4, 0.5), (0.5, -0.5, 0.5), (0.5, -0.5, 1.0),
            (0.5, 0.5, 0.5), (0.5, 0.5, 1.0), (-0.5, 0.33, 0.0),
            (-0.5, -0.33, 0.0), (0.5, -0.33, 0.5), (0.5, 0.33, 0.5),
            (-0.5, 0.33, -0.5), (-0.5, -0.33, -0.5), (0.5, -0.33, -0.5),
            (0.5, 0.33, -0.5)]
        ]
        t_faces = [
            (0, 1, 3, 2, 8, 9), (2, 3, 7, 6), (6, 7, 5, 4, 10, 11),
            (4, 5, 1, 0), (9, 10, 4, 0), (7, 3, 1, 5),
            (2, 6, 11, 8), (9, 8, 12, 13), (12, 15, 14, 13),
            (8, 11, 15, 12), (10, 9, 13, 14), (11, 10, 14, 15)]
        t_uvs = [
            [(0.5, 1.0), (0.93, 0.75), (0.93, 0.25), (0.5, 0.0), (0.07, 0.25), (0.07, 0.75)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.5, 1.0), (0.93, 0.75), (0.93, 0.25), (0.5, 0.0), (0.07, 0.25), (0.07, 0.75)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
            [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
        ]
        t_left = []
        t_right = []
    t_idmats = [idmat for f in t_faces]
    for pan in self.pans:
        for i, s in enumerate(pan.segs):
            if ('LINK' in s.type and
                    d.beam_sec_enable):
                ##############
                # beam inside
                ##############
                f = len(verts)
                s0 = s.offset(-0.5 * d.beam_sec_width)
                s2 = pan.last_seg(i)
                s3 = pan.next_seg(i)
                p0 = s0.p0
                p1 = s0.p1
                t0 = 0
                t1 = 1
                res, p, t = s0.intersect(s2)
                if res:
                    t0 = t
                    p0 = p
                res, p, t = s0.intersect(s3)
                if res:
                    t1 = t
                    p1 = p
                p0 = s.lerp(t0)
                p1 = s.lerp(t1)
                x0, y0 = s0.lerp(t0)
                x1, y1 = s.p0
                z0 = self.z + d.beam_sec_alt + pan.altitude(p0)
                z1 = z0 - d.beam_sec_height
                z2 = self.z + d.beam_sec_alt + pan.altitude(s.p0)
                z3 = z2 - d.beam_sec_height
                verts.extend([
                    (x0, y0, z0),
                    (x0, y0, z1),
                    (x1, y1, z2),
                    (x1, y1, z3)
                ])
                x2, y2 = s0.lerp(t1)
                x3, y3 = s.p1
                z0 = self.z + d.beam_sec_alt + pan.altitude(p1)
                z1 = z0 - d.beam_sec_height
                z2 = self.z + d.beam_sec_alt + pan.altitude(s.p1)
                z3 = z2 - d.beam_sec_height
                verts.extend([
                    (x2, y2, z0),
                    (x2, y2, z1),
                    (x3, y3, z2),
                    (x3, y3, z3)
                ])
                faces.extend([
                    (f, f + 4, f + 5, f + 1),
                    (f + 1, f + 5, f + 7, f + 3),
                    (f + 2, f + 3, f + 7, f + 6),
                    (f + 2, f + 6, f + 4, f),
                    (f, f + 1, f + 3, f + 2),
                    (f + 5, f + 4, f + 6, f + 7)
                ])
                matids.extend([
                    idmat_poutre, idmat_poutre, idmat_poutre,
                    idmat_poutre, idmat_poutre, idmat_poutre
                ])
                uvs.extend([
                    [(0, 0), (1, 0), (1, 1), (0, 1)],
                    [(0, 0), (1, 0), (1, 1), (0, 1)],
                    [(0, 0), (1, 0), (1, 1), (0, 1)],
                    [(0, 0), (1, 0), (1, 1), (0, 1)],
                    [(0, 0), (1, 0), (1, 1), (0, 1)],
                    [(0, 0), (1, 0), (1, 1), (0, 1)]
                ])
            if s.type == 'LINK_HIP':
                # TODO:
                # Slice borders properly
                if d.hip_enable:
                    s0 = pan.last_seg(i)
                    s1 = pan.next_seg(i)
                    s2 = s
                    p0 = s0.p1
                    p1 = s1.p0
                    z0 = pan.altitude(p0)
                    z1 = pan.altitude(p1)
                    # s0 is top seg
                    if z1 > z0:
                        p0, p1 = p1, p0
                        z0, z1 = z1, z0
                        s2 = s2.oposite
                    # cross slope of the hip, sampled 1 unit off the segment
                    dz = pan.altitude(s2.sized_normal(0, 1).p1) - z0
                    if dz < 0:
                        s1 = s1.offset(d.tile_border)
                    # vx from p0 to p1
                    x, y = p1 - p0
                    v = Vector((x, y, z1 - z0))
                    vx = v.normalized()
                    vy = vx.cross(Vector((0, 0, 1)))
                    vz = vy.cross(vx)
                    # local frame following the hip direction
                    x0, y0 = p0 + d.hip_alt * vz.to_2d()
                    z2 = z0 + self.z + d.hip_alt * vz.z
                    tM = Matrix([
                        [vx.x, vy.x, vz.x, x0],
                        [vx.y, vy.y, vz.y, y0],
                        [vx.z, vy.z, vz.z, z2],
                        [0, 0, 0, 1]
                    ])
                    space_x = v.length - d.tile_border
                    n_x = 1 + int(space_x / d.hip_space_x)
                    dx = space_x / n_x
                    x0 = 0.5 * dx
                    t_verts = [p for p in t_pts]
                    # apply slope (copy before mutating the shared template)
                    for i in t_left:
                        t_verts[i] = t_verts[i].copy()
                        t_verts[i].z -= dz * t_verts[i].y
                    for i in t_right:
                        t_verts[i] = t_verts[i].copy()
                        t_verts[i].z += dz * t_verts[i].y
                    for k in range(n_x):
                        lM = tM * Matrix([
                            [1, 0, 0, x0 + k * dx],
                            [0, -1, 0, 0],
                            [0, 0, 1, 0],
                            [0, 0, 0, 1]
                        ])
                        f = len(verts)
                        verts.extend([lM * p for p in t_verts])
                        faces.extend([tuple(i + f for i in p) for p in t_faces])
                        matids.extend(t_idmats)
                        uvs.extend(t_uvs)
            elif s.type == 'LINK_VALLEY':
                if d.valley_enable:
                    f = len(verts)
                    s0 = s.offset(-2 * d.tile_couloir)
                    s1 = pan.last_seg(i)
                    s2 = pan.next_seg(i)
                    p0 = s0.p0
                    p1 = s0.p1
                    res, p, t = s0.intersect(s1)
                    if res:
                        p0 = p
                    res, p, t = s0.intersect(s2)
                    if res:
                        p1 = p
                    alt = self.z + d.valley_altitude
                    x0, y0 = s1.p1
                    x1, y1 = p0
                    x2, y2 = p1
                    x3, y3 = s2.p0
                    z0 = alt + pan.altitude(s1.p1)
                    z1 = alt + pan.altitude(p0)
                    z2 = alt + pan.altitude(p1)
                    z3 = alt + pan.altitude(s2.p0)
                    verts.extend([
                        (x0, y0, z0),
                        (x1, y1, z1),
                        (x2, y2, z2),
                        (x3, y3, z3),
                    ])
                    faces.extend([
                        (f, f + 3, f + 2, f + 1)
                    ])
                    matids.extend([
                        idmat_valley
                    ])
                    uvs.extend([
                        [(0, 0), (1, 0), (1, 1), (0, 1)]
                    ])
            elif s.type == 'AXIS' and d.hip_enable and pan.side == 'LEFT':
                tmin = 0
                tmax = 1
                s0 = pan.last_seg(i)
                if s0.type == 'SIDE' and s.length > 0:
                    tmin = 0 - d.tile_side / s.length
                s1 = pan.next_seg(i)
                if s1.type == 'SIDE' and s.length > 0:
                    tmax = 1 + d.tile_side / s.length
                # print("tmin:%s tmax:%s" % (tmin, tmax))
                ####################
                # Ridge caps (faitiere)
                ####################
                f = len(verts)
                s_len = (tmax - tmin) * s.length
                n_obj = 1 + int(s_len / d.hip_space_x)
                dx = s_len / n_obj
                x0 = 0.5 * dx
                v = s.v.normalized()
                p0 = s.lerp(tmin)
                tM = Matrix([
                    [v.x, v.y, 0, p0.x],
                    [v.y, -v.x, 0, p0.y],
                    [0, 0, 1, self.z + d.hip_alt],
                    [0, 0, 0, 1]
                ])
                t_verts = [p.copy() for p in t_pts]
                # apply slope
                for i in t_left:
                    t_verts[i].z += t_verts[i].y * (pan.other_side.slope - d.tile_size_z / d.tile_size_y)
                for i in t_right:
                    t_verts[i].z -= t_verts[i].y * (pan.slope - d.tile_size_z / d.tile_size_y)
                for k in range(n_obj):
                    lM = tM * Matrix([
                        [1, 0, 0, x0 + k * dx],
                        [0, -1, 0, 0],
                        [0, 0, 1, 0],
                        [0, 0, 0, 1]
                    ])
                    v = len(verts)
                    verts.extend([lM * p for p in t_verts])
                    faces.extend([tuple(i + v for i in f) for f in t_faces])
                    matids.extend(t_idmats)
                    uvs.extend(t_uvs)
def make_hole(self, context, hole_obj, o, d, update_parent=False):
    """
    Hole for t child on parent
    create / update a RoofCutter on parent
    assume context object is child roof
    with parent set
    """
    # print("Make hole :%s hole_obj:%s" % (o.name, hole_obj))
    if o.parent is None:
        return
    # root is a RoofSegment
    root = self.nodes[0].root
    r_pan = root.right
    l_pan = root.left
    # merge :
    # 5 ____________ 4
    #  /            |
    # /     left    |
    # /_____axis_____| 3 <- kill axis and this one
    # 0\             |
    #  \    right   |
    # 1 \____________| 2
    #
    # degenerate case:
    #
    #  /|
    # / |
    # \ |
    #  \|
    #
    segs = []
    last = len(r_pan.segs) - 1
    # NOTE(review): if r_pan.segs were empty, to_merge would be unbound
    # below — presumably a root segment always has at least one seg.
    for i, seg in enumerate(r_pan.segs):
        # r_pan start parent roof side
        if i == last:
            to_merge = seg.copy
        elif seg.type != 'AXIS':
            segs.append(seg.copy)
    for i, seg in enumerate(l_pan.segs):
        # l_pan end parent roof side
        if i == 1:
            # 0 is axis
            to_merge.p1 = seg.p1
            segs.append(to_merge)
        elif seg.type != 'AXIS':
            segs.append(seg.copy)
    # if there is side offset:
    # create an arrow
    #
    # 4           s4
    #    /|
    #   / |___s1_______
    #  / p3   |p2      s3
    # 0\      |___s0_______| p1
    #  \ p0   |
    # 1 \|
    s0 = root.left._axis.offset(
        max(0.001,
            min(
                root.right.ysize - 0.001,
                root.right.ysize - d.hole_offset_right
            )
        ))
    s1 = root.left._axis.offset(
        -max(0.001,
            min(
                root.left.ysize - 0.001,
                root.left.ysize - d.hole_offset_left
            )
        ))
    s3 = segs[2].offset(
        -min(root.left.xsize - 0.001, d.hole_offset_front)
    )
    s4 = segs[0].copy
    p1 = s4.p1
    s4.p1 = segs[-1].p0
    s4.p0 = p1
    res, p0, t = s4.intersect(s0)
    res, p1, t = s0.intersect(s3)
    res, p2, t = s1.intersect(s3)
    res, p3, t = s4.intersect(s1)
    pts = []
    # pts in cw order for 'DIFFERENCE' mode
    pts.extend([segs[-1].p1, segs[-1].p0])
    if (segs[-1].p0 - p3).length > 0.001:
        pts.append(p3)
    pts.extend([p2, p1])
    if (segs[0].p1 - p0).length > 0.001:
        pts.append(p0)
    pts.extend([segs[0].p1, segs[0].p0])
    pts = [p.to_3d() for p in pts]
    if hole_obj is None:
        # create the cutter on the parent object
        context.scene.objects.active = o.parent
        bpy.ops.archipack.roof_cutter(parent=d.t_parent, auto_manipulate=False)
        hole_obj = context.active_object
    else:
        context.scene.objects.active = hole_obj
    hole_obj.select = True
    if d.parts[0].a0 < 0:
        y = -d.t_dist_y
    else:
        y = d.t_dist_y
    hole_obj.matrix_world = o.matrix_world * Matrix([
        [1, 0, 0, 0],
        [0, 1, 0, y],
        [0, 0, 1, 0],
        [0, 0, 0, 1]
    ])
    hd = archipack_roof_cutter.datablock(hole_obj)
    hd.boundary = o.name
    hd.update_points(context, hole_obj, pts, update_parent=update_parent)
    hole_obj.select = False
    context.scene.objects.active = o
def change_coordsys(self, fromTM, toTM):
    """Move this shape in place from coordsys fromTM into coordsys toTM."""
    # translation between both origins, expressed in target space (2d)
    delta = (toTM.inverted() * fromTM.translation).to_2d()
    # signed angle between the two y axes
    angle = toTM.row[1].to_2d().angle_signed(fromTM.row[1].to_2d())
    c, s_ = cos(angle), sin(angle)
    rot = Matrix([
        [c, -s_],
        [s_, c]
    ])
    for seg in self.segs:
        # rotate about origin then translate so p0 lands on its target
        offset = (rot * seg.p0) - seg.p0 + delta
        seg.rotate(angle)
        seg.translate(offset)
def t_partition(self, array, begin, end):
    """Lomuto-style in-place partition of array[begin:end + 1].

    Items are compared on their first element (the wall parameter t).
    Returns the final index of the pivot (originally array[begin]).
    """
    store = begin
    pivot_key = array[begin][0]
    for idx in range(begin + 1, end + 1):
        # wall idx
        if array[idx][0] < pivot_key:
            store += 1
            array[idx], array[store] = array[store], array[idx]
    array[store], array[begin] = array[begin], array[store]
    return store
def sort_t(self, array, begin=0, end=None):
    """In-place quicksort of array[begin:end + 1] keyed on item[0].

    Uses self.t_partition; iterative with an explicit stack so deep
    inputs do not recurse.
    """
    if end is None:
        end = len(array) - 1
    pending = [(begin, end)]
    while pending:
        lo, hi = pending.pop()
        if lo >= hi:
            continue
        mid = self.t_partition(array, lo, hi)
        pending.append((lo, mid - 1))
        pending.append((mid + 1, hi))
def make_wall_fit(self, context, o, wall, inside, auto_update, skip_z):
    """
    Skip_z : dosent set z for auto-fit roof
    """
    # Fit a wall object's per-part z splits so the wall top follows the
    # roof underside. Intersects each wall segment with the roof pans and
    # writes the resulting (t, z) splits back into the wall datablock.
    wd = wall.data.archipack_wall2[0]
    wg = wd.get_generator()
    z0 = self.z - wd.z
    # wg in roof coordsys
    wg.change_coordsys(wall.matrix_world, o.matrix_world)
    if inside:
        # fit inside
        offset = -0.5 * (1 - wd.x_offset) * wd.width
    else:
        # fit outside
        offset = 0
    wg.set_offset(offset)
    # one list of (t, z, distance) candidates per wall segment
    wall_t = [[] for w in wg.segs]
    for pan in self.pans:
        # walls segment
        for widx, wseg in enumerate(wg.segs):
            ls = wseg.line.length
            for seg in pan.segs:
                # intersect with a roof segment
                # any linked or axis intersection here
                # will be dup as they are between 2 roof parts
                res, p, t, v = wseg.line.intersect_ext(seg)
                if res:
                    z = z0 + pan.altitude(p)
                    wall_t[widx].append((t, z, t * ls))
            # lie under roof
            if type(wseg).__name__ == "CurvedWall":
                # sample curved walls at fixed steps
                for step in range(12):
                    t = step / 12
                    p = wseg.line.lerp(t)
                    if pan.inside(p):
                        z = z0 + pan.altitude(p)
                        wall_t[widx].append((t, z, t * ls))
            else:
                if pan.inside(wseg.line.p0):
                    z = z0 + pan.altitude(wseg.line.p0)
                    wall_t[widx].append((0, z, 0))
    old = context.active_object
    old_sel = wall.select
    wall.select = True
    context.scene.objects.active = wall
    # suspend updates while rewriting splits
    if auto_update:
        wd.auto_update = False
    for part in wd.parts:
        part.auto_update = False
    # setup splits count and first split to 0
    for widx, seg in enumerate(wall_t):
        self.sort_t(seg)
        # print("seg: %s" % seg)
        wd.parts[widx].n_splits = len(seg) + 1
        wd.parts[widx].z[0] = 0
        wd.parts[widx].t[0] = 0
    # add splits, skip dups
    for widx, seg in enumerate(wall_t):
        t0 = 0
        last_d = -1
        sid = 1
        for s in seg:
            t, z, d = s
            if skip_z:
                z = 0
            if t == 0:
                # add at end of last segment
                if widx > 0:
                    lid = wd.parts[widx - 1].n_splits - 1
                    wd.parts[widx - 1].z[lid] = z
                    wd.parts[widx - 1].t[lid] = 1
                else:
                    wd.parts[widx].z[0] = z
                    wd.parts[widx].t[0] = t
                sid = 1
            else:
                # skip near-duplicate splits (closer than 1mm)
                if d - last_d < 0.001:
                    wd.parts[widx].n_splits -= 1
                    continue
                wd.parts[widx].z[sid] = z
                wd.parts[widx].t[sid] = t - t0
                t0 = t
                sid += 1
                last_d = d
    if wd.closed:
        # carry first split's z onto the closing segment's last split
        z = wd.parts[0].z[0]
        if skip_z:
            z = 0
        last = wd.parts[wd.n_parts].n_splits - 1
        wd.parts[wd.n_parts].z[last] = z
        wd.parts[wd.n_parts].t[last] = 1.0
    for part in wd.parts:
        part.auto_update = True
    if auto_update:
        wd.auto_update = True
    """
    for s in self.segs:
        s.as_curve(context)
    for s in wg.segs:
        s.as_curve(context)
    """
    wall.select = old_sel
    context.scene.objects.active = old
def boundary(self, context, o):
    """
    either external or holes cuts
    Apply every child roof-cutter to the pans; pans fully removed by a
    cut are dropped afterwards.
    """
    wd = archipack_roof.datablock(o)
    # schrinkwrap target use parent's holes
    if wd.schrinkwrap_target and o.parent:
        source = o.parent.children
    else:
        source = o.children
    dead = set()
    for child in source:
        cd = archipack_roof_cutter.datablock(child)
        if cd is None:
            continue
        g = cd.ensure_direction()
        g.change_coordsys(child.matrix_world, o.matrix_world)
        for idx, pan in enumerate(self.pans):
            if not pan.slice(g):
                dead.add(idx)
            pan.limits()
    # pop from the end so earlier indices stay valid
    for idx in sorted(dead, reverse=True):
        self.pans.pop(idx)
def draft(self, context, verts, edges):
    """Append draft (wireframe preview) geometry for pans and slope segs."""
    for pan in self.pans:
        pan.draw(context, self.z, verts, edges)
    for seg in self.segs:
        if seg.constraint_type != 'SLOPE':
            continue
        # one edge per slope constraint, flattened at roof altitude
        start = len(verts)
        a = seg.p0.to_3d()
        a.z = self.z
        b = seg.p1.to_3d()
        b.z = self.z
        verts.extend([a, b])
        edges.append([start, start + 1])
# Property update callbacks.
# Blender passes the owning PropertyGroup instance as "self", so these
# module-level wrappers simply dispatch to the datablock's update()
# with the appropriate flags.
def update(self, context):
    # plain geometry refresh
    self.update(context)


def update_manipulators(self, context):
    # refresh geometry and rebuild manipulators
    self.update(context, manipulable_refresh=True)


def update_path(self, context):
    # dispatch to the datablock's update_path()
    self.update_path(context)


def update_parent(self, context):
    # update part a0
    o = context.active_object
    p, d = self.find_parent(context)
    if d is not None:
        o.parent = p
        # trigger object update
        # hole creation and parent's update
        self.parts[0].a0 = pi / 2
    elif self.t_parent != "":
        # clear a dangling parent reference
        self.t_parent = ""


def update_cutter(self, context):
    # refresh geometry and the hole cut into the parent
    self.update(context, update_hole=True)


def update_childs(self, context):
    # refresh geometry, child roofs and the parent hole
    self.update(context, update_childs=True, update_hole=True)


def update_components(self, context):
    # refresh only this roof's own components
    self.update(context, update_parent=False, update_hole=False)
class ArchipackSegment():
    """Mixin describing one polyline segment (length + start angle).

    Mixed into PropertyGroup subclasses; property updates dispatch to
    the module-level update callbacks.
    """
    # segment length in scene units
    length = FloatProperty(
        name="Length",
        min=0.01,
        max=1000.0,
        default=4.0,
        update=update
    )
    # angle relative to previous segment
    a0 = FloatProperty(
        name="Angle",
        min=-2 * pi,
        max=2 * pi,
        default=0,
        subtype='ANGLE', unit='ROTATION',
        update=update_cutter
    )
    # manipulator definitions attached to this segment
    manipulators = CollectionProperty(type=archipack_manipulator)
class ArchipackLines():
    """Mixin managing a variable-length collection of segment parts.

    Expects the concrete class to provide a `parts` collection and a
    `setup_manipulators` method.
    """
    n_parts = IntProperty(
        name="Parts",
        min=1,
        default=1, update=update_manipulators
    )
    # UI layout related
    parts_expand = BoolProperty(
        default=False
    )

    def draw(self, layout, context):
        # Collapsible UI listing every part's properties.
        box = layout.box()
        row = box.row()
        if self.parts_expand:
            row.prop(self, 'parts_expand', icon="TRIA_DOWN", icon_only=True, text="Parts", emboss=False)
            box.prop(self, 'n_parts')
            for i, part in enumerate(self.parts):
                part.draw(layout, context, i)
        else:
            row.prop(self, 'parts_expand', icon="TRIA_RIGHT", icon_only=True, text="Parts", emboss=False)

    def update_parts(self):
        # Resize self.parts to n_parts + 1 rows, then refresh manipulators.
        # print("update_parts")
        # remove rows
        # NOTE:
        # n_parts+1
        # as last one is end point of last segment or closing one
        for i in range(len(self.parts), self.n_parts + 1, -1):
            self.parts.remove(i - 1)
        # add rows
        for i in range(len(self.parts), self.n_parts + 1):
            self.parts.add()
        self.setup_manipulators()

    def setup_parts_manipulators(self):
        # Ensure each part owns its 5 manipulators (angle, length, wall
        # snap, index label, offset) and keep the index-based ones in sync.
        for i in range(self.n_parts + 1):
            p = self.parts[i]
            n_manips = len(p.manipulators)
            if n_manips < 1:
                s = p.manipulators.add()
                s.type_key = "ANGLE"
                s.prop1_name = "a0"
            if n_manips < 2:
                s = p.manipulators.add()
                s.type_key = "SIZE"
                s.prop1_name = "length"
            if n_manips < 3:
                s = p.manipulators.add()
                s.type_key = 'WALL_SNAP'
                s.prop1_name = str(i)
                s.prop2_name = 'z'
            if n_manips < 4:
                s = p.manipulators.add()
                s.type_key = 'DUMB_STRING'
                s.prop1_name = str(i + 1)
            if n_manips < 5:
                s = p.manipulators.add()
                s.type_key = "SIZE"
                s.prop1_name = "offset"
            p.manipulators[2].prop1_name = str(i)
            p.manipulators[3].prop1_name = str(i + 1)
class archipack_roof_segment(ArchipackSegment, PropertyGroup):
    """One roof axis part: link target, per-side widths/slopes and options."""
    # index of the part this one is linked to
    bound_idx = IntProperty(
        name="Link to",
        default=0,
        min=0,
        update=update_manipulators
    )
    width_left = FloatProperty(
        name="L Width",
        min=0.01,
        default=3.0,
        update=update_cutter
    )
    width_right = FloatProperty(
        name="R Width",
        min=0.01,
        default=3.0,
        update=update_cutter
    )
    slope_left = FloatProperty(
        name="L slope",
        min=0.0,
        default=0.3,
        update=update_cutter
    )
    slope_right = FloatProperty(
        name="R slope",
        min=0.0,
        default=0.3,
        update=update_cutter
    )
    # which of width/slope are user-driven vs automatic, per side
    auto_left = EnumProperty(
        description="Left mode",
        name="Left",
        items=(
            ('AUTO', 'Auto', '', 0),
            ('WIDTH', 'Width', '', 1),
            ('SLOPE', 'Slope', '', 2),
            ('ALL', 'All', '', 3),
        ),
        default="AUTO",
        update=update_manipulators
    )
    auto_right = EnumProperty(
        description="Right mode",
        name="Right",
        items=(
            ('AUTO', 'Auto', '', 0),
            ('WIDTH', 'Width', '', 1),
            ('SLOPE', 'Slope', '', 2),
            ('ALL', 'All', '', 3),
        ),
        default="AUTO",
        update=update_manipulators
    )
    triangular_end = BoolProperty(
        name="Triangular end",
        default=False,
        update=update
    )
    take_precedence = BoolProperty(
        name="Take precedence",
        description="On T segment take width precedence",
        default=False,
        update=update
    )
    # 'HORIZONTAL' parts are ridge axes, 'SLOPE' parts are slope constraints
    constraint_type = EnumProperty(
        items=(
            ('HORIZONTAL', 'Horizontal', '', 0),
            ('SLOPE', 'Slope', '', 1)
        ),
        default='HORIZONTAL',
        update=update_manipulators
    )
    enforce_part = EnumProperty(
        name="Enforce part",
        items=(
            ('AUTO', 'Auto', '', 0),
            ('VALLEY', 'Valley', '', 1),
            ('HIP', 'Hip', '', 2)
        ),
        default='AUTO',
        update=update
    )
    # DimensionProvider
    uid = IntProperty(default=0)

    def find_in_selection(self, context):
        """
        find witch selected object this instance belongs to
        provide support for "copy to selected"
        """
        selected = [o for o in context.selected_objects]
        for o in selected:
            d = archipack_roof.datablock(o)
            if d:
                for part in d.parts:
                    if part == self:
                        return d
        return None

    def draw(self, layout, context, index):
        # Per-part UI; first part has no constraint type / link options.
        box = layout.box()
        if index > 0:
            box.prop(self, "constraint_type", text=str(index + 1))
            if self.constraint_type == 'SLOPE':
                box.prop(self, "enforce_part", text="")
        else:
            box.label("Part 1:")
        box.prop(self, "length")
        box.prop(self, "a0")
        if index > 0:
            box.prop(self, 'bound_idx')
            if self.constraint_type == 'HORIZONTAL':
                box.prop(self, "triangular_end")
                row = box.row(align=True)
                row.prop(self, "auto_left", text="")
                row.prop(self, "auto_right", text="")
                if self.auto_left in {'ALL', 'WIDTH'}:
                    box.prop(self, "width_left")
                if self.auto_left in {'ALL', 'SLOPE'}:
                    box.prop(self, "slope_left")
                if self.auto_right in {'ALL', 'WIDTH'}:
                    box.prop(self, "width_right")
                if self.auto_right in {'ALL', 'SLOPE'}:
                    box.prop(self, "slope_right")
        elif self.constraint_type == 'HORIZONTAL':
            box.prop(self, "triangular_end")

    def update(self, context, manipulable_refresh=False, update_hole=False):
        # Forward to the owning roof datablock's update.
        # NOTE(review): update_hole parameter is ignored here — the call
        # below always passes update_hole=True; confirm intended.
        props = self.find_in_selection(context)
        if props is not None:
            props.update(context,
                manipulable_refresh,
                update_parent=True,
                update_hole=True,
                update_childs=True)
class archipack_roof(ArchipackLines, ArchipackObject, Manipulable, DimensionProvider, PropertyGroup):
parts = CollectionProperty(type=archipack_roof_segment)
z = FloatProperty(
name="Altitude",
default=3, precision=2, step=1,
unit='LENGTH', subtype='DISTANCE',
update=update_childs
)
slope_left = FloatProperty(
name="L slope",
default=0.5, precision=2, step=1,
update=update_childs
)
slope_right = FloatProperty(
name="R slope",
default=0.5, precision=2, step=1,
update=update_childs
)
width_left = FloatProperty(
name="L width",
default=3, precision=2, step=1,
unit='LENGTH', subtype='DISTANCE',
update=update_cutter
)
width_right = FloatProperty(
name="R width",
default=3, precision=2, step=1,
unit='LENGTH', subtype='DISTANCE',
update=update_cutter
)
draft = BoolProperty(
options={'SKIP_SAVE'},
name="Draft mode",
default=False,
update=update_manipulators
)
auto_update = BoolProperty(
options={'SKIP_SAVE'},
default=True,
update=update_manipulators
)
quick_edit = BoolProperty(
options={'SKIP_SAVE'},
name="Quick Edit",
default=True
)
tile_enable = BoolProperty(
name="Enable",
default=True,
update=update_components
)
tile_solidify = BoolProperty(
name="Solidify",
default=True,
update=update_components
)
tile_height = FloatProperty(
name="Height",
description="Amount for solidify",
min=0,
default=0.02,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_bevel = BoolProperty(
name="Bevel",
default=False,
update=update_components
)
tile_bevel_amt = FloatProperty(
name="Amount",
description="Amount for bevel",
min=0,
default=0.02,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_bevel_segs = IntProperty(
name="Segs",
description="Bevel Segs",
min=1,
default=2,
update=update_components
)
tile_alternate = BoolProperty(
name="Alternate",
default=False,
update=update_components
)
tile_offset = FloatProperty(
name="Offset",
description="Offset from start",
min=0,
max=100,
subtype="PERCENTAGE",
update=update_components
)
tile_altitude = FloatProperty(
name="Altitude",
description="Altitude from roof",
default=0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_size_x = FloatProperty(
name="Width",
description="Size of tiles on x axis",
min=0.01,
default=0.2,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_size_y = FloatProperty(
name="Length",
description="Size of tiles on y axis",
min=0.01,
default=0.3,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_size_z = FloatProperty(
name="Thickness",
description="Size of tiles on z axis",
min=0.0,
default=0.02,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_space_x = FloatProperty(
name="Width",
description="Space between tiles on x axis",
min=0.01,
default=0.2,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_space_y = FloatProperty(
name="Length",
description="Space between tiles on y axis",
min=0.01,
default=0.3,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_fit_x = BoolProperty(
name="Fit x",
description="Fit roof on x axis",
default=True,
update=update_components
)
tile_fit_y = BoolProperty(
name="Fit y",
description="Fit roof on y axis",
default=True,
update=update_components
)
tile_expand = BoolProperty(
options={'SKIP_SAVE'},
name="Tiles",
description="Expand tiles panel",
default=False
)
tile_model = EnumProperty(
name="Model",
items=(
('BRAAS1', 'Braas 1', '', 0),
('BRAAS2', 'Braas 2', '', 1),
('ETERNIT', 'Eternit', '', 2),
('LAUZE', 'Lauze', '', 3),
('ROMAN', 'Roman', '', 4),
('ROUND', 'Round', '', 5),
('PLACEHOLDER', 'Square', '', 6),
('ONDULEE', 'Ondule', '', 7),
('METAL', 'Metal', '', 8),
# ('USER', 'User defined', '', 7)
),
default="BRAAS2",
update=update_components
)
tile_side = FloatProperty(
name="Side",
description="Space on side",
default=0,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_couloir = FloatProperty(
name="Valley",
description="Space between tiles on valley",
min=0,
default=0.05,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
tile_border = FloatProperty(
name="Bottom",
description="Tiles offset from bottom",
default=0,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
gutter_expand = BoolProperty(
options={'SKIP_SAVE'},
name="Gutter",
description="Expand gutter panel",
default=False
)
gutter_enable = BoolProperty(
name="Enable",
default=True,
update=update_components
)
gutter_alt = FloatProperty(
name="Altitude",
description="altitude",
default=0,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
gutter_width = FloatProperty(
name="Width",
description="Width",
min=0.01,
default=0.15,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
gutter_dist = FloatProperty(
name="Spacing",
description="Spacing",
min=0,
default=0.05,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
gutter_boudin = FloatProperty(
name="Small width",
description="Small width",
min=0,
default=0.015,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
gutter_segs = IntProperty(
default=6,
min=1,
name="Segs",
update=update_components
)
beam_expand = BoolProperty(
options={'SKIP_SAVE'},
name="Beam",
description="Expand beam panel",
default=False
)
beam_enable = BoolProperty(
name="Ridge pole",
default=True,
update=update_components
)
beam_width = FloatProperty(
name="Width",
description="Width",
min=0.01,
default=0.2,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
beam_height = FloatProperty(
name="Height",
description="Height",
min=0.01,
default=0.35,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
beam_offset = FloatProperty(
name="Offset",
description="Distance from roof border",
default=0.02,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
beam_alt = FloatProperty(
name="Altitude",
description="Altitude from roof",
default=-0.15,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
beam_sec_enable = BoolProperty(
name="Hip rafter",
default=True,
update=update_components
)
beam_sec_width = FloatProperty(
name="Width",
description="Width",
min=0.01,
default=0.15,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
beam_sec_height = FloatProperty(
name="Height",
description="Height",
min=0.01,
default=0.2,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
beam_sec_alt = FloatProperty(
name="Altitude",
description="Distance from roof",
default=-0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
rafter_enable = BoolProperty(
name="Rafter",
default=True,
update=update_components
)
rafter_width = FloatProperty(
name="Width",
description="Width",
min=0.01,
default=0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
rafter_height = FloatProperty(
name="Height",
description="Height",
min=0.01,
default=0.2,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
rafter_spacing = FloatProperty(
name="Spacing",
description="Spacing",
min=0.1,
default=0.7,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
rafter_start = FloatProperty(
name="Offset",
description="Spacing from roof border",
min=0,
default=0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
rafter_alt = FloatProperty(
name="Altitude",
description="Altitude from roof",
max=-0.0001,
default=-0.001,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
hip_enable = BoolProperty(
name="Enable",
default=True,
update=update_components
)
hip_expand = BoolProperty(
options={'SKIP_SAVE'},
name="Hips",
description="Expand hips panel",
default=False
)
hip_alt = FloatProperty(
name="Altitude",
description="Hip altitude from roof",
default=0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
hip_space_x = FloatProperty(
name="Spacing",
description="Space between hips",
min=0.01,
default=0.4,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
hip_size_x = FloatProperty(
name="Length",
description="Length of hip",
min=0.01,
default=0.4,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
hip_size_y = FloatProperty(
name="Width",
description="Width of hip",
min=0.01,
default=0.15,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
hip_size_z = FloatProperty(
name="Height",
description="Height of hip",
min=0.0,
default=0.15,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
hip_model = EnumProperty(
name="Model",
items=(
('ROUND', 'Round', '', 0),
('ETERNIT', 'Eternit', '', 1),
('FLAT', 'Flat', '', 2)
),
default="ROUND",
update=update_components
)
valley_altitude = FloatProperty(
name="Altitude",
description="Valley altitude from roof",
default=0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
valley_enable = BoolProperty(
name="Valley",
default=True,
update=update_components
)
fascia_enable = BoolProperty(
name="Enable",
description="Enable Fascia",
default=True,
update=update_components
)
fascia_expand = BoolProperty(
options={'SKIP_SAVE'},
name="Fascia",
description="Expand fascia panel",
default=False
)
fascia_height = FloatProperty(
name="Height",
description="Height",
min=0.01,
default=0.3,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
fascia_width = FloatProperty(
name="Width",
description="Width",
min=0.01,
default=0.02,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
fascia_offset = FloatProperty(
name="Offset",
description="Offset from roof border",
default=0,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
fascia_altitude = FloatProperty(
name="Altitude",
description="Fascia altitude from roof",
default=0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
bargeboard_enable = BoolProperty(
name="Enable",
description="Enable Bargeboard",
default=True,
update=update_components
)
bargeboard_expand = BoolProperty(
options={'SKIP_SAVE'},
name="Bargeboard",
description="Expand Bargeboard panel",
default=False
)
bargeboard_height = FloatProperty(
name="Height",
description="Height",
min=0.01,
default=0.3,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
bargeboard_width = FloatProperty(
name="Width",
description="Width",
min=0.01,
default=0.02,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
bargeboard_offset = FloatProperty(
name="Offset",
description="Offset from roof border",
default=0.001,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
bargeboard_altitude = FloatProperty(
name="Altitude",
description="Fascia altitude from roof",
default=0.1,
unit='LENGTH', subtype='DISTANCE',
update=update_components
)
t_parent = StringProperty(
name="Parent",
default="",
update=update_parent
)
t_part = IntProperty(
name="Part",
description="Parent part index",
default=0,
min=0,
update=update_cutter
)
t_dist_x = FloatProperty(
name="Dist x",
description="Location on axis ",
default=0,
update=update_cutter
)
t_dist_y = FloatProperty(
name="Dist y",
description="Lateral distance from axis",
min=0.0001,
default=0.0001,
update=update_cutter
)
hole_offset_left = FloatProperty(
name="Left",
description="Left distance from border",
min=0,
default=0,
update=update_cutter
)
hole_offset_right = FloatProperty(
name="Right",
description="Right distance from border",
min=0,
default=0,
update=update_cutter
)
hole_offset_front = FloatProperty(
name="Front",
description="Front distance from border",
default=0,
update=update_cutter
)
schrinkwrap_target = BoolProperty(
name="Schrikwrap target",
description="Use this part as target for wall fit",
default=False
)
    def make_wall_fit(self, context, o, wall, inside=False, auto_update=True, skip_z=False):
        """Fit *wall* object under this roof (*o* is the roof object).

        Builds a generator at the roof altitude and delegates the actual
        fitting to RoofGenerator.make_wall_fit — semantics of inside /
        skip_z live there (presumably inner-side fit and z preservation;
        confirm in the generator implementation).
        """
        origin = Vector((0, 0, self.z))
        g = self.get_generator(origin)
        g.make_roof(context)
        g.make_wall_fit(context, o, wall, inside, auto_update, skip_z)
def find_shrinkwrap(self, o):
for c in o.children:
d = archipack_roof.datablock(c)
if d and d.schrinkwrap_target:
return c
return None
    def create_shrinkwrap(self, context, o, target=None):
        """
        Create shrinkwrap target from roof

        Duplicates the roof mesh into a child object flagged as
        schrinkwrap_target. When *target* is given, its mesh is replaced
        in place instead of creating and linking a new object.
        Returns the (new or reused) target object.
        """
        m = o.data.copy()
        if target is None:
            new_o = bpy.data.objects.new(o.name, m)
        else:
            # swap meshes and free the old one so no orphan data piles up
            old_m = target.data
            target.data = m
            new_o = target
            bpy.data.meshes.remove(old_m)
        d = archipack_roof.datablock(new_o)
        d.schrinkwrap_target = True
        if target is None:
            # freshly created object: link into the scene under the roof
            context.scene.objects.link(new_o)
            new_o.parent = o
            new_o.matrix_world = o.matrix_world.copy()
        new_o.select = True
        # presumably re-enables rebuilds on the copied datablock — confirm
        d.auto_update = True
        new_o.select = False
        return new_o
def update_parts(self):
# NOTE:
# n_parts+1
# as last one is end point of last segment or closing one
for i in range(len(self.parts), self.n_parts, -1):
self.parts.remove(i - 1)
# add rows
for i in range(len(self.parts), self.n_parts):
bound_idx = len(self.parts)
self.parts.add()
self.parts[-1].bound_idx = bound_idx
for p in self.parts:
if p.uid == 0:
self.create_uid(p)
self.setup_manipulators()
def setup_manipulators(self):
if self.schrinkwrap_target:
return
if len(self.manipulators) < 1:
s = self.manipulators.add()
s.type_key = "SIZE"
s.prop1_name = "z"
s.normal = (0, 1, 0)
if len(self.manipulators) < 2:
s = self.manipulators.add()
s.type_key = "SIZE"
s.prop1_name = "width_left"
if len(self.manipulators) < 3:
s = self.manipulators.add()
s.type_key = "SIZE"
s.prop1_name = "width_right"
for i in range(self.n_parts):
p = self.parts[i]
n_manips = len(p.manipulators)
if n_manips < 1:
s = p.manipulators.add()
s.type_key = "ANGLE"
s.prop1_name = "a0"
if n_manips < 2:
s = p.manipulators.add()
s.type_key = "SIZE"
s.prop1_name = "length"
if n_manips < 3:
s = p.manipulators.add()
s.type_key = 'DUMB_STRING'
s.prop1_name = str(i + 1)
p.manipulators[2].prop1_name = str(i + 1)
if n_manips < 4:
s = p.manipulators.add()
s.type_key = 'SIZE'
s.prop1_name = "width_left"
if n_manips < 5:
s = p.manipulators.add()
s.type_key = 'SIZE'
s.prop1_name = "width_right"
if n_manips < 6:
s = p.manipulators.add()
s.type_key = 'SIZE'
s.prop1_name = "slope_left"
if n_manips < 7:
s = p.manipulators.add()
s.type_key = 'SIZE'
s.prop1_name = "slope_right"
def get_generator(self, origin=Vector((0, 0, 0))):
g = RoofGenerator(self, origin)
# TODO: sort part by bound idx so deps always find parent
for i, part in enumerate(self.parts):
# skip part if bound_idx > parent
# so deps always see parent
if part.bound_idx <= i:
g.add_part(part)
if not self.schrinkwrap_target:
g.locate_manipulators()
return g
def make_surface(self, o, verts, edges):
bm = bmesh.new()
for v in verts:
bm.verts.new(v)
bm.verts.ensure_lookup_table()
for ed in edges:
bm.edges.new((bm.verts[ed[0]], bm.verts[ed[1]]))
bm.edges.ensure_lookup_table()
# bmesh.ops.contextual_create(bm, geom=bm.edges)
bm.to_mesh(o.data)
bm.free()
def find_parent(self, context):
o = context.scene.objects.get(self.t_parent)
return o, archipack_roof.datablock(o)
def intersection_angle(self, t_slope, t_width, p_slope, angle):
# 2d intersection angle between two roofs parts
dy = abs(t_slope * t_width / p_slope)
ca = cos(angle)
ta = tan(angle)
if ta == 0:
w0 = 0
else:
w0 = dy * ta
if ca == 0:
w1 = 0
else:
w1 = t_width / ca
dx = w1 - w0
return atan2(dy, dx)
def relocate_child(self, context, o, g, child):
d = archipack_roof.datablock(child)
if d is not None and d.t_part - 1 < len(g.segs):
# print("relocate_child(%s)" % (child.name))
seg = g.segs[d.t_part]
# adjust T part matrix_world from parent
# T part origin located on parent axis
# with y in parent direction
t = (d.t_dist_x / seg.length)
x, y, z = seg.lerp(t).to_3d()
dy = -seg.v.normalized()
child.matrix_world = o.matrix_world * Matrix([
[dy.x, -dy.y, 0, x],
[dy.y, dy.x, 0, y],
[0, 0, 1, z],
[0, 0, 0, 1]
])
def relocate_childs(self, context, o, g):
for child in o.children:
d = archipack_roof.datablock(child)
if d is not None and d.t_parent == o.name:
self.relocate_child(context, o, g, child)
    def update_childs(self, context, o, g):
        """Rebuild every t-child roof of *o* (regenerating their holes).

        Each child is temporarily made the selected/active object because
        its update() resolves the datablock through the selection; the
        selection state of *o* is restored afterwards.
        """
        for child in o.children:
            d = archipack_roof.datablock(child)
            if d is not None:
                if d.t_parent == o.name and not d.schrinkwrap_target:
                    # print("upate_childs(%s)" % (child.name))
                    child.select = True
                    context.scene.objects.active = child
                    # regenerate hole
                    d.update(context, update_hole=True, update_parent=False)
                    child.select = False
        o.select = True
        context.scene.objects.active = o
    def update(self,
            context,
            manipulable_refresh=False,
            update_childs=False,
            update_parent=True,
            update_hole=False,
            force_update=False):
        """Rebuild roof geometry and dependant objects.

        update_hole: on t_child must update parent
        update_childs: force childs update
        force_update: skip throttle (quick_edit) and build covering now
        manipulable_refresh: tear down / flag rebuild of manipulators
        """
        # print("update")
        o = self.find_in_selection(context, self.auto_update)
        if o is None:
            return
        # clean up manipulators before any data model change
        if manipulable_refresh and not self.schrinkwrap_target:
            self.manipulable_disable(context)
        self.update_parts()
        verts, edges, faces, matids, uvs = [], [], [], [], []
        y = 0
        z = self.z
        p, d = self.find_parent(context)
        g = None
        # t childs: use parent to relocate
        # setup slopes into generator
        if d is not None:
            pg = d.get_generator()
            pg.make_roof(context)
            if self.t_part - 1 < len(pg.segs):
                seg = pg.nodes[self.t_part].root
                d.relocate_child(context, p, pg, o)
                a0 = self.parts[0].a0
                a_axis = a0 - pi / 2
                a_offset = 0
                s_left = self.slope_left
                w_left = -self.width_left
                s_right = self.slope_right
                w_right = self.width_right
                if a0 > 0:
                    # a_axis is measured from the perpendicular to the axis
                    slope = seg.right.slope
                    y = self.t_dist_y
                else:
                    # child points the other way: mirror sides and offsets
                    a_offset = pi
                    slope = seg.left.slope
                    y = -self.t_dist_y
                    s_left, s_right = s_right, s_left
                    w_left, w_right = -w_right, -w_left
                if slope == 0:
                    # avoid division by zero downstream
                    slope = 0.0001
                # print("slope: %s" % (slope))
                z = d.z - self.t_dist_y * slope
                # a_right from axis cross z
                b_right = self.intersection_angle(
                    s_left,
                    w_left,
                    slope,
                    a_axis)
                a_right = b_right + a_offset
                b_left = self.intersection_angle(
                    s_right,
                    w_right,
                    slope,
                    a_axis)
                a_left = b_left + a_offset
                g = self.get_generator(origin=Vector((0, y, z)))
                # override by user defined slope if any
                make_right = True
                make_left = True
                for s in g.segs:
                    if (s.constraint_type == 'SLOPE' and
                            s.v0_idx == 0):
                        da = g.segs[0].v.angle_signed(s.v)
                        if da > 0:
                            make_left = False
                        else:
                            make_right = False
                if make_left:
                    # Add 'SLOPE' constraints for segment 0
                    v = Vector((cos(a_left), sin(a_left)))
                    s = StraightRoof(g.origin, v)
                    s.v0_idx = 0
                    s.constraint_type = 'SLOPE'
                    # s.enforce_part = 'VALLEY'
                    s.angle_0 = a_left
                    s.take_precedence = False
                    g.segs.append(s)
                if make_right:
                    v = Vector((cos(a_right), sin(a_right)))
                    s = StraightRoof(g.origin, v)
                    s.v0_idx = 0
                    s.constraint_type = 'SLOPE'
                    # s.enforce_part = 'VALLEY'
                    s.angle_0 = a_right
                    s.take_precedence = False
                    g.segs.append(s)
        if g is None:
            # stand-alone roof (no parent to derive slopes from)
            g = self.get_generator(origin=Vector((0, y, z)))
        # setup per segment manipulators
        if len(g.segs) > 0:
            f = g.segs[0]
            # z
            n = f.straight(-1, 0).v.to_3d()
            self.manipulators[0].set_pts([(0, 0, 0), (0, 0, self.z), (1, 0, 0)], normal=n)
            # left width
            n = f.sized_normal(0, -self.width_left)
            self.manipulators[1].set_pts([n.p0.to_3d(), n.p1.to_3d(), (-1, 0, 0)])
            # right width
            n = f.sized_normal(0, self.width_right)
            self.manipulators[2].set_pts([n.p0.to_3d(), n.p1.to_3d(), (1, 0, 0)])
        g.make_roof(context)
        # update childs here so parent may use
        # new holes when parent shape does change
        if update_childs:
            self.update_childs(context, o, g)
        # on t_child
        if d is not None and update_hole:
            hole_obj = self.find_hole(context, o)
            g.make_hole(context, hole_obj, o, self, update_parent)
            # print("make_hole")
        # add cutters
        g.boundary(context, o)
        if self.draft and not self.schrinkwrap_target:
            # draft mode: cheap wireframe preview only
            g.draft(context, verts, edges)
            g.gutter(self, verts, faces, edges, matids, uvs)
            self.make_surface(o, verts, edges)
        else:
            if not self.schrinkwrap_target:
                # decoration parts merged into the main mesh
                if self.bargeboard_enable:
                    g.bargeboard(self, verts, faces, edges, matids, uvs)
                if self.fascia_enable:
                    g.fascia(self, verts, faces, edges, matids, uvs)
                if self.beam_enable:
                    g.beam_primary(self, verts, faces, edges, matids, uvs)
                g.hips(self, verts, faces, edges, matids, uvs)
                if self.gutter_enable:
                    g.gutter(self, verts, faces, edges, matids, uvs)
            bmed.buildmesh(
                context, o, verts, faces, matids=matids, uvs=uvs,
                weld=False, clean=False, auto_smooth=True, temporary=False)
            if self.schrinkwrap_target:
                g.lambris(context, o, self)
            else:
                if self.rafter_enable:
                    # bpy.ops.object.mode_set(mode='EDIT')
                    g.rafter(context, o, self)
                    # print("rafter")
            if self.quick_edit and not force_update:
                # defer heavy covering build through the throttle operator
                if self.tile_enable:
                    bpy.ops.archipack.roof_throttle_update(name=o.name)
            else:
                # throttle here
                if self.tile_enable:
                    g.couverture(context, o, self)
            if not self.schrinkwrap_target:
                # keep the shrinkwrap helper in sync with the new mesh
                target = self.find_shrinkwrap(o)
                self.create_shrinkwrap(context, o, target)
        # enable manipulators rebuild
        if manipulable_refresh and not self.schrinkwrap_target:
            self.manipulable_refresh = True
        # print("rafter")
        # restore context
        self.restore_context(context)
        # print("restore context")
def find_hole(self, context, o):
p, d = self.find_parent(context)
if d is not None:
for child in p.children:
cd = archipack_roof_cutter.datablock(child)
if cd is not None and cd.boundary == o.name:
return child
return None
def manipulable_setup(self, context):
"""
NOTE:
this one assume context.active_object is the instance this
data belongs to, failing to do so will result in wrong
manipulators set on active object
"""
if self.schrinkwrap_target:
return
self.manipulable_disable(context)
o = context.active_object
self.setup_manipulators()
for i, part in enumerate(self.parts):
if i > 0:
# start angle
self.manip_stack.append(part.manipulators[0].setup(context, o, part))
if part.constraint_type == 'HORIZONTAL':
# length / radius + angle
self.manip_stack.append(part.manipulators[1].setup(context, o, part))
# index
self.manip_stack.append(part.manipulators[2].setup(context, o, self))
# size left
if part.auto_left in {'WIDTH', 'ALL'}:
self.manip_stack.append(part.manipulators[3].setup(context, o, part))
# size right
if part.auto_right in {'WIDTH', 'ALL'}:
self.manip_stack.append(part.manipulators[4].setup(context, o, part))
# slope left
if part.auto_left in {'SLOPE', 'ALL'}:
self.manip_stack.append(part.manipulators[5].setup(context, o, part))
# slope right
if part.auto_right in {'SLOPE', 'ALL'}:
self.manip_stack.append(part.manipulators[6].setup(context, o, part))
for m in self.manipulators:
self.manip_stack.append(m.setup(context, o, self))
def draw(self, layout, context):
box = layout.box()
row = box.row()
if self.parts_expand:
row.prop(self, 'parts_expand', icon="TRIA_DOWN", icon_only=True, text="Parts", emboss=False)
box.prop(self, 'n_parts')
for i, part in enumerate(self.parts):
part.draw(layout, context, i)
else:
row.prop(self, 'parts_expand', icon="TRIA_RIGHT", icon_only=True, text="Parts", emboss=False)
    def update_hole(self, context):
        # update parent's roof only when manipulated
        # NOTE(review): update() is invoked with update_parent=True but
        # without update_hole=True — confirm this propagation is intended
        self.update(context, update_parent=True)
    def update_operation(self, context):
        # Reverse winding so 'INTERSECTION' boundaries are counter-clockwise.
        # NOTE(review): self.operation / self.reverse come from a cutter
        # mixin not visible in this class — confirm they exist here.
        self.reverse(context, make_ccw=(self.operation == 'INTERSECTION'))
class archipack_roof_cutter_segment(ArchipackCutterPart, PropertyGroup):
    """One segment of a roof cutter boundary polygon.

    The segment type drives which decoration the cut border receives
    (bargeboard, gutter, hip/beam or nothing).
    """
    manipulators = CollectionProperty(type=archipack_manipulator)
    type = EnumProperty(
        name="Type",
        items=(
            ('SIDE', 'Side', 'Side with bargeboard', 0),
            ('BOTTOM', 'Bottom', 'Bottom with gutter', 1),
            # tooltip typo fixed: "witout" -> "without"
            ('LINK', 'Side link', 'Side without decoration', 2),
            ('AXIS', 'Top', 'Top part with hip and beam', 3)
        ),
        default='SIDE',
        update=update_hole
    )

    def find_in_selection(self, context):
        """Return the cutter datablock whose parts contain this segment.

        Provides "copy to selected" support; returns None when this
        segment belongs to no selected cutter.
        """
        # iterate the selection directly: the list copy bought nothing
        for o in context.selected_objects:
            d = archipack_roof_cutter.datablock(o)
            if d:
                for part in d.parts:
                    if part == self:
                        return d
        return None
class archipack_roof_cutter(ArchipackCutter, ArchipackObject, Manipulable, DimensionProvider, PropertyGroup):
    """Roof cutter datablock: a 2d boundary cutting holes in a parent roof."""
    # boundary
    parts = CollectionProperty(type=archipack_roof_cutter_segment)
    boundary = StringProperty(
        default="",
        name="Boundary",
        description="Boundary of t child to cut parent"
    )

    def update_points(self, context, o, pts, update_parent=False):
        """Rebuild cutter geometry from raw points.

        auto_update is disabled while loading so each point does not
        trigger a full rebuild; the try/finally guarantees it is restored
        even when from_points raises, so the cutter never gets stuck in a
        non-updating state.
        """
        self.auto_update = False
        self.manipulable_disable(context)
        try:
            self.from_points(pts)
        finally:
            self.manipulable_refresh = True
            self.auto_update = True
        if update_parent:
            self.update_parent(context, o)

    def update_parent(self, context, o):
        """Ask the parent roof to rebuild after this cutter changed.

        Temporarily makes the parent active/selected because its update()
        resolves the datablock through the selection.
        """
        d = archipack_roof.datablock(o.parent)
        if d is not None:
            o.parent.select = True
            context.scene.objects.active = o.parent
            d.update(context, update_childs=False, update_hole=False)
            o.parent.select = False
            context.scene.objects.active = o
class ARCHIPACK_PT_roof_cutter(Panel):
    """3d view sidebar panel for roof cutter objects."""
    bl_idname = "ARCHIPACK_PT_roof_cutter"
    bl_label = "Roof Cutter"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'ArchiPack'

    @classmethod
    def poll(cls, context):
        # show the panel only when the active object is a roof cutter
        return archipack_roof_cutter.filter(context.active_object)

    def draw(self, context):
        """Draw the cutter UI.

        Auto-generated cutters (boundary set) are driven by their t-child
        roof, so manual manipulation is hidden for them.
        """
        prop = archipack_roof_cutter.datablock(context.active_object)
        if prop is None:
            return
        layout = self.layout
        # removed unused local: scene = context.scene
        box = layout.box()
        if prop.boundary != "":
            box.label(text="Auto Cutter:")
            box.label(text=prop.boundary)
        else:
            box.operator('archipack.manipulate', icon='HAND')
            box.prop(prop, 'operation', text="")
        # this empty box is a visual separator in the UI — keep it
        box = layout.box()
        prop.draw(layout, context)
class ARCHIPACK_PT_roof(Panel):
    """3d view sidebar panel exposing all roof parameters."""
    bl_idname = "ARCHIPACK_PT_roof"
    bl_label = "Roof"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'ArchiPack'

    @classmethod
    def poll(cls, context):
        # show the panel only when the active object is a roof
        return archipack_roof.filter(context.active_object)

    def draw(self, context):
        """Draw the full roof UI: presets, t-child setup, parts and
        covering / hip / beam / gutter / fascia / bargeboard sections."""
        o = context.active_object
        prop = archipack_roof.datablock(o)
        if prop is None:
            return
        scene = context.scene
        layout = self.layout
        if prop.schrinkwrap_target:
            # shrinkwrap helper: only offer jumping back to the real roof
            layout.operator("archipack.select_parent")
            return
        row = layout.row(align=True)
        row.operator('archipack.manipulate', icon='HAND')
        row.operator('archipack.roof', text="Delete", icon='ERROR').mode = 'DELETE'
        # presets
        box = layout.box()
        row = box.row(align=True)
        row.operator("archipack.roof_preset_menu", text=bpy.types.ARCHIPACK_OT_roof_preset_menu.bl_label)
        row.operator("archipack.roof_preset", text="", icon='ZOOMIN')
        row.operator("archipack.roof_preset", text="", icon='ZOOMOUT').remove_active = True
        box = layout.box()
        box.prop_search(prop, "t_parent", scene, "objects", text="Parent", icon='OBJECT_DATA')
        layout.operator('archipack.roof_cutter').parent = o.name
        p, d = prop.find_parent(context)
        # t-child roofs expose parent-relative placement and hole borders
        if d is not None:
            box.prop(prop, 't_part')
            box.prop(prop, 't_dist_x')
            box.prop(prop, 't_dist_y')
            box.label(text="Hole")
            box.prop(prop, 'hole_offset_front')
            box.prop(prop, 'hole_offset_left')
            box.prop(prop, 'hole_offset_right')
        box = layout.box()
        box.prop(prop, 'quick_edit', icon="MOD_MULTIRES")
        box.prop(prop, 'draft')
        # stand-alone roofs own their altitude / widths / slopes
        if d is None:
            box.prop(prop, 'z')
        box.prop(prop, 'slope_left')
        box.prop(prop, 'slope_right')
        box.prop(prop, 'width_left')
        box.prop(prop, 'width_right')
        # parts
        prop.draw(layout, context)
        # tiles
        box = layout.box()
        row = box.row(align=True)
        if prop.tile_expand:
            row.prop(prop, 'tile_expand', icon="TRIA_DOWN", text="Covering", icon_only=True, emboss=False)
        else:
            row.prop(prop, 'tile_expand', icon="TRIA_RIGHT", text="Covering", icon_only=True, emboss=False)
        row.prop(prop, 'tile_enable')
        if prop.tile_expand:
            box.prop(prop, 'tile_model', text="")
            box.prop(prop, 'tile_solidify', icon='MOD_SOLIDIFY')
            if prop.tile_solidify:
                box.prop(prop, 'tile_height')
                box.separator()
            box.prop(prop, 'tile_bevel', icon='MOD_BEVEL')
            if prop.tile_bevel:
                box.prop(prop, 'tile_bevel_amt')
                box.prop(prop, 'tile_bevel_segs')
                box.separator()
            box.label(text="Tile size")
            box.prop(prop, 'tile_size_x')
            box.prop(prop, 'tile_size_y')
            box.prop(prop, 'tile_size_z')
            box.prop(prop, 'tile_altitude')
            box.separator()
            box.label(text="Distribution")
            box.prop(prop, 'tile_alternate', icon='NLA')
            row = box.row(align=True)
            row.prop(prop, 'tile_fit_x', icon='ALIGN')
            row.prop(prop, 'tile_fit_y', icon='ALIGN')
            box.prop(prop, 'tile_offset')
            box.label(text="Spacing")
            box.prop(prop, 'tile_space_x')
            box.prop(prop, 'tile_space_y')
            box.separator()  # hip
            box.label(text="Borders")
            box.prop(prop, 'tile_side')
            box.prop(prop, 'tile_couloir')
            box.prop(prop, 'tile_border')
        # hips and valleys
        box = layout.box()
        row = box.row(align=True)
        if prop.hip_expand:
            row.prop(prop, 'hip_expand', icon="TRIA_DOWN", text="Hip", icon_only=True, emboss=False)
        else:
            row.prop(prop, 'hip_expand', icon="TRIA_RIGHT", text="Hip", icon_only=True, emboss=False)
        row.prop(prop, 'hip_enable')
        if prop.hip_expand:
            box.prop(prop, 'hip_model', text="")
            box.prop(prop, 'hip_size_x')
            box.prop(prop, 'hip_size_y')
            box.prop(prop, 'hip_size_z')
            box.prop(prop, 'hip_alt')
            box.prop(prop, 'hip_space_x')
            box.separator()
            box.prop(prop, 'valley_enable')
            box.prop(prop, 'valley_altitude')
        # beams and rafters
        box = layout.box()
        row = box.row(align=True)
        if prop.beam_expand:
            row.prop(prop, 'beam_expand', icon="TRIA_DOWN", text="Beam", icon_only=True, emboss=False)
        else:
            row.prop(prop, 'beam_expand', icon="TRIA_RIGHT", text="Beam", icon_only=True, emboss=False)
        if prop.beam_expand:
            box.prop(prop, 'beam_enable')
            if prop.beam_enable:
                box.prop(prop, 'beam_width')
                box.prop(prop, 'beam_height')
                box.prop(prop, 'beam_offset')
                box.prop(prop, 'beam_alt')
            box.separator()
            box.prop(prop, 'beam_sec_enable')
            if prop.beam_sec_enable:
                box.prop(prop, 'beam_sec_width')
                box.prop(prop, 'beam_sec_height')
                box.prop(prop, 'beam_sec_alt')
            box.separator()
            box.prop(prop, 'rafter_enable')
            if prop.rafter_enable:
                box.prop(prop, 'rafter_height')
                box.prop(prop, 'rafter_width')
                box.prop(prop, 'rafter_spacing')
                box.prop(prop, 'rafter_start')
                box.prop(prop, 'rafter_alt')
        # gutter
        box = layout.box()
        row = box.row(align=True)
        if prop.gutter_expand:
            row.prop(prop, 'gutter_expand', icon="TRIA_DOWN", text="Gutter", icon_only=True, emboss=False)
        else:
            row.prop(prop, 'gutter_expand', icon="TRIA_RIGHT", text="Gutter", icon_only=True, emboss=False)
        row.prop(prop, 'gutter_enable')
        if prop.gutter_expand:
            box.prop(prop, 'gutter_alt')
            box.prop(prop, 'gutter_width')
            box.prop(prop, 'gutter_dist')
            box.prop(prop, 'gutter_boudin')
            box.prop(prop, 'gutter_segs')
        # fascia
        box = layout.box()
        row = box.row(align=True)
        if prop.fascia_expand:
            row.prop(prop, 'fascia_expand', icon="TRIA_DOWN", text="Fascia", icon_only=True, emboss=False)
        else:
            row.prop(prop, 'fascia_expand', icon="TRIA_RIGHT", text="Fascia", icon_only=True, emboss=False)
        row.prop(prop, 'fascia_enable')
        if prop.fascia_expand:
            box.prop(prop, 'fascia_altitude')
            box.prop(prop, 'fascia_width')
            box.prop(prop, 'fascia_height')
            box.prop(prop, 'fascia_offset')
        # bargeboard
        box = layout.box()
        row = box.row(align=True)
        if prop.bargeboard_expand:
            row.prop(prop, 'bargeboard_expand', icon="TRIA_DOWN", text="Bargeboard", icon_only=True, emboss=False)
        else:
            row.prop(prop, 'bargeboard_expand', icon="TRIA_RIGHT", text="Bargeboard", icon_only=True, emboss=False)
        row.prop(prop, 'bargeboard_enable')
        if prop.bargeboard_expand:
            box.prop(prop, 'bargeboard_altitude')
            box.prop(prop, 'bargeboard_width')
            box.prop(prop, 'bargeboard_height')
            box.prop(prop, 'bargeboard_offset')
# ------------------------------------------------------------------
# Define operator class to create object
# ------------------------------------------------------------------
class ARCHIPACK_OT_roof(ArchipackCreateTool, Operator):
    """Create or delete a roof object depending on self.mode."""
    bl_idname = "archipack.roof"
    bl_label = "Roof"
    bl_description = "Roof"
    bl_category = 'Archipack'
    bl_options = {'REGISTER', 'UNDO'}
    mode = EnumProperty(
        items=(
            ('CREATE', 'Create', '', 0),
            ('DELETE', 'Delete', '', 1)
        ),
        default='CREATE'
    )

    def delete(self, context):
        """Remove the active roof object."""
        active = context.active_object
        if not archipack_roof.filter(active):
            return
        bpy.ops.archipack.disable_manipulate()
        self.delete_object(context, active)

    def create(self, context):
        """Build a new roof object, link it to the scene and return it."""
        mesh = bpy.data.meshes.new("Roof")
        obj = bpy.data.objects.new("Roof", mesh)
        d = mesh.archipack_roof.add()
        # make manipulators selectable
        d.manipulable_selectable = True
        context.scene.objects.link(obj)
        obj.select = True
        context.scene.objects.active = obj
        self.add_material(obj)
        # disable progress bar when background-rendering preset thumbs
        if not self.auto_manipulate:
            d.quick_edit = False
        self.load_preset(d)
        return obj

    def execute(self, context):
        """Operator entry point: dispatch on self.mode."""
        if context.mode != "OBJECT":
            self.report({'WARNING'}, "Archipack: Option only valid in Object mode")
            return {'CANCELLED'}
        if self.mode == 'CREATE':
            bpy.ops.object.select_all(action="DESELECT")
            obj = self.create(context)
            obj.location = context.scene.cursor_location
            obj.select = True
            context.scene.objects.active = obj
            self.manipulate()
        else:
            self.delete(context)
        return {'FINISHED'}
class ARCHIPACK_OT_roof_cutter(ArchipackCreateTool, Operator):
    """Create a roof cutter: a boundary polyline used to trim a roof."""
    bl_idname = "archipack.roof_cutter"
    bl_label = "Roof Cutter"
    bl_description = "Roof Cutter"
    bl_category = 'Archipack'
    bl_options = {'REGISTER', 'UNDO'}
    # name of the roof object the cutter should attach to (may be empty)
    parent = StringProperty("")
    def create(self, context):
        """Build the cutter object; when a parent roof is given, seed a
        default U-shaped outline sized from the roof bounding box and
        register the cutter as the roof boundary."""
        m = bpy.data.meshes.new("Roof Cutter")
        o = bpy.data.objects.new("Roof Cutter", m)
        d = m.archipack_roof_cutter.add()
        parent = context.scene.objects.get(self.parent)
        if parent is not None:
            o.parent = parent
            bbox = parent.bound_box
            angle_90 = pi / 2
            # opposite corners of the parent bounding box
            x0, y0, z = bbox[0]
            x1, y1, z = bbox[6]
            # default segment lengths: 20% of the roof extent
            x = 0.2 * (x1 - x0)
            y = 0.2 * (y1 - y0)
            # place the cutter to the left of the roof
            o.matrix_world = parent.matrix_world * Matrix.Translation(Vector((-3 * x, 0, 0)))
            p = d.parts.add()
            p.a0 = - angle_90
            p.length = y
            p = d.parts.add()
            p.a0 = angle_90
            p.length = x
            p = d.parts.add()
            p.a0 = angle_90
            p.length = y
            d.n_parts = 3
            # d.close = True
            pd = archipack_roof.datablock(parent)
            pd.boundary = o.name
        else:
            o.location = context.scene.cursor_location
        # make manipulators selectable
        d.manipulable_selectable = True
        context.scene.objects.link(o)
        o.select = True
        context.scene.objects.active = o
        self.add_material(o)
        self.load_preset(d)
        update_operation(d, context)
        return o
    # -----------------------------------------------------
    # Execute
    # -----------------------------------------------------
    def execute(self, context):
        if context.mode == "OBJECT":
            bpy.ops.object.select_all(action="DESELECT")
            o = self.create(context)
            o.select = True
            context.scene.objects.active = o
            self.manipulate()
            return {'FINISHED'}
        else:
            self.report({'WARNING'}, "Archipack: Option only valid in Object mode")
            return {'CANCELLED'}
# ------------------------------------------------------------------
# Define operator class to create object
# ------------------------------------------------------------------
class ARCHIPACK_OT_roof_from_curve(Operator):
    """Create a roof whose axis follows the active curve object."""
    bl_idname = "archipack.roof_from_curve"
    bl_label = "Roof curve"
    bl_description = "Create a roof from a curve"
    bl_category = 'Archipack'
    bl_options = {'REGISTER', 'UNDO'}
    auto_manipulate = BoolProperty(default=True)
    @classmethod
    def poll(self, context):
        # only available while a curve object is active
        return context.active_object is not None and context.active_object.type == 'CURVE'
    def draw(self, context):
        layout = self.layout
        row = layout.row()
        row.label("Use Properties panel (N) to define parms", icon='INFO')
    def create(self, context):
        """Build a roof driven by the active curve and return it."""
        curve = context.active_object
        m = bpy.data.meshes.new("Roof")
        o = bpy.data.objects.new("Roof", m)
        d = m.archipack_roof.add()
        # make manipulators selectable
        d.manipulable_selectable = True
        d.user_defined_path = curve.name
        context.scene.objects.link(o)
        o.select = True
        context.scene.objects.active = o
        d.update_path(context)
        # anchor the roof at the first point of the first spline
        spline = curve.data.splines[0]
        if spline.type == 'POLY':
            pt = spline.points[0].co
        elif spline.type == 'BEZIER':
            pt = spline.bezier_points[0].co
        else:
            pt = Vector((0, 0, 0))
        # pretranslate
        o.matrix_world = curve.matrix_world * Matrix.Translation(pt)
        o.select = True
        context.scene.objects.active = o
        return o
    # -----------------------------------------------------
    # Execute
    # -----------------------------------------------------
    def execute(self, context):
        if context.mode == "OBJECT":
            bpy.ops.object.select_all(action="DESELECT")
            self.create(context)
            if self.auto_manipulate:
                bpy.ops.archipack.manipulate('INVOKE_DEFAULT')
            return {'FINISHED'}
        else:
            self.report({'WARNING'}, "Archipack: Option only valid in Object mode")
            return {'CANCELLED'}
class ARCHIPACK_OT_roof_from_wall(Operator):
    """Create a roof sized and oriented to cover the active archipack wall."""
    bl_idname = "archipack.roof_from_wall"
    bl_label = "Roof"
    bl_description = "Create a roof from a wall"
    bl_category = 'Archipack'
    bl_options = {'REGISTER', 'UNDO'}
    auto_manipulate = BoolProperty(default=True)
    # distance the roof extends past the wall outline on every side
    roof_overflow = FloatProperty(
        name="Overflow",
        default=1.0,
        min=0
    )
    use_small_as_axis = BoolProperty(
        name="Use small side as axis",
        default=False
    )
    cut_borders = BoolProperty(
        name="Cut borders",
        default=False
    )
    @classmethod
    def poll(self, context):
        # only available when the active object is an archipack wall
        o = context.active_object
        return o and o.data is not None and "archipack_wall2" in o.data
    def draw(self, context):
        layout = self.layout
        layout.prop(self, 'roof_overflow')
        layout.prop(self, 'use_small_as_axis')
        layout.prop(self, 'cut_borders')
    def create(self, context, wall):
        """Create the roof for *wall*, fit it to the wall's minimum bounding
        rectangle (optionally buffered and cut to the wall outline)."""
        wd = wall.data.archipack_wall2[0]
        # 2d outline of the wall exterior
        io, exterior, childs = wd.as_geom(context, wall, 'OUTSIDE', [], [], [])
        if self.cut_borders:
            # expand the wall outline by the overflow and fit the roof to
            # the minimum bounding rectangle of that buffer
            buffer = exterior.buffer(self.roof_overflow,
                                     resolution=12,
                                     join_style=2,
                                     cap_style=3,
                                     mitre_limit=10 * self.roof_overflow,
                                     single_sided=False
                                     )
            tM, w, h, poly, w_pts = ShapelyOps.min_bounding_rect(buffer)
        else:
            tM, w, h, poly, w_pts = ShapelyOps.min_bounding_rect(exterior)
        # compute height from w / h
        if self.use_small_as_axis:
            # ridge along the short side: rotate the roof 90 degrees
            height = wd.z + 0.25 * max(w, h)
            h, w = w, h
            rM = Matrix([
                [0, -1, 0, 0],
                [1, 0, 0, 0],
                [0, 0, 1, 0],
                [0, 0, 0, 1],
            ])
        else:
            height = wd.z + 0.25 * min(w, h)
            rM = Matrix()
        bpy.ops.archipack.roof(auto_manipulate=False)
        o = context.active_object
        # keep the roof in the same hierarchy as the wall
        if wall.parent:
            o.parent = wall.parent
        else:
            o.parent = wall
        o.matrix_world = io.coordsys.world * tM * rM * Matrix.Translation(
            Vector((-(self.roof_overflow + 0.5 * w), 0, 0)))
        d = o.data.archipack_roof[0]
        # batch property edits, then re-enable updates once at the end
        d.auto_update = False
        d.z = height
        d.width_left = self.roof_overflow + (h / 2)
        d.width_right = self.roof_overflow + (h / 2)
        d.parts[0].length = w + 2 * self.roof_overflow
        if self.cut_borders:
            # output geom as curve
            result = Io.to_curve(context.scene, io.coordsys, buffer, 'buffer')
            bpy.ops.archipack.roof_cutter(parent=o.name, auto_manipulate=False)
            cutter = context.active_object
            cutter.data.archipack_roof_cutter[0].operation = 'INTERSECTION'
            cutter.data.archipack_roof_cutter[0].user_defined_path = result.name
            # the temporary curve is no longer needed once consumed
            rd = result.data
            context.scene.objects.unlink(result)
            bpy.data.curves.remove(rd)
        o.select = True
        context.scene.objects.active = o
        d.auto_update = True
        # let the wall follow the new roof
        wall.select = True
        context.scene.objects.active = wall
        wall.data.archipack_wall2[0].fit_roof = True
        wall.select = False
        return o
    def invoke(self, context, event):
        # show the options dialog before running
        wm = context.window_manager
        return wm.invoke_props_dialog(self)
    # -----------------------------------------------------
    # Execute
    # -----------------------------------------------------
    def execute(self, context):
        if context.mode == "OBJECT":
            wall = context.active_object
            bpy.ops.object.select_all(action="DESELECT")
            o = self.create(context, wall)
            o.select = True
            context.scene.objects.active = o
            return {'FINISHED'}
        else:
            self.report({'WARNING'}, "Archipack: Option only valid in Object mode")
            return {'CANCELLED'}
# Update throttle
class ArchipackThrottleHandler():
"""
One modal runs for each object at time
when call for 2nd one
update timer so first one wait more
and kill 2nd one
"""
def __init__(self, context, delay):
self._timer = None
self.start = 0
self.update_state = False
self.delay = delay
def start_timer(self, context):
self.start = time.time()
self._timer = context.window_manager.event_timer_add(self.delay, context.window)
def stop_timer(self, context):
if self._timer is not None:
context.window_manager.event_timer_remove(self._timer)
self._timer = None
def execute(self, context):
"""
refresh timer on execute
return
True if modal should run
False on complete
"""
if self._timer is None:
self.update_state = False
self.start_timer(context)
return True
# allready a timer running
self.stop_timer(context)
# prevent race conditions when allready in update mode
if self.is_updating:
return False
self.start_timer(context)
return False
def modal(self, context, event):
if event.type == 'TIMER' and not self.is_updating:
if time.time() - self.start > self.delay:
self.update_state = True
self.stop_timer(context)
return True
return False
@property
def is_updating(self):
return self.update_state
# Registry of active throttle handlers, keyed by object name, and the
# shared delay (in seconds) applied before a deferred update is executed.
throttle_handlers = {}
throttle_delay = 1
class ARCHIPACK_OT_roof_throttle_update(Operator):
    """Modal operator applying a roof update only after a quiet delay.

    One modal loop runs per roof object (identified by ``name``); rapid
    successive edit events merely refresh the throttle timer so the heavy
    ``d.update()`` runs once, after the last event.
    """
    bl_idname = "archipack.roof_throttle_update"
    bl_label = "Update childs with a delay"

    # name of the roof object this operator instance watches
    name = StringProperty()

    def kill_handler(self, context, name):
        """Stop and discard the throttle handler registered for *name*."""
        if name in throttle_handlers:
            throttle_handlers[name].stop_timer(context)
            # BUGFIX: delete the same entry that was checked and stopped.
            # The previous code deleted throttle_handlers[self.name], which
            # removed the wrong handler whenever name != self.name.
            del throttle_handlers[name]

    def get_handler(self, context, delay):
        """Return the handler for self.name, creating it on first use."""
        global throttle_handlers
        if self.name not in throttle_handlers:
            throttle_handlers[self.name] = ArchipackThrottleHandler(context, delay)
        return throttle_handlers[self.name]

    def modal(self, context, event):
        global throttle_handlers
        if self.name not in throttle_handlers:
            # handler was killed elsewhere: nothing left to do
            return {'FINISHED'}
        if not throttle_handlers[self.name].modal(context, event):
            return {'PASS_THROUGH'}
        # delay elapsed: run the deferred update on the target object
        act = context.active_object
        o = context.scene.objects.get(self.name)
        if o is not None:
            # temporarily make the target active, update it, then restore
            # the previous selection / active object
            selected = o.select
            o.select = True
            context.scene.objects.active = o
            d = o.data.archipack_roof[0]
            d.update(context,
                     force_update=True,
                     update_parent=False)
            # skip_parent_update=self.skip_parent_update)
            o.select = selected
            context.scene.objects.active = act
        del throttle_handlers[self.name]
        return {'FINISHED'}

    def execute(self, context):
        global throttle_delay
        handler = self.get_handler(context, throttle_delay)
        if handler.execute(context):
            # first call for this object: keep a modal alive to wait it out
            context.window_manager.modal_handler_add(self)
            return {'RUNNING_MODAL'}
        # a modal is already waiting; its timer has just been refreshed
        return {'FINISHED'}
# ------------------------------------------------------------------
# Define operator class to load / save presets
# ------------------------------------------------------------------
class ARCHIPACK_OT_roof_preset_menu(PresetMenuOperator, Operator):
    """Preset menu listing the saved roof styles."""
    bl_description = "Show Roof presets"
    bl_idname = "archipack.roof_preset_menu"
    bl_label = "Roof Styles"
    # presets are stored under this sub-directory of the preset paths
    preset_subdir = "archipack_roof"
class ARCHIPACK_OT_roof_preset(ArchipackPreset, Operator):
    """Add a Roof style preset"""
    bl_idname = "archipack.roof_preset"
    bl_label = "Add Roof Style"
    preset_menu = "ARCHIPACK_OT_roof_preset_menu"
    @property
    def blacklist(self):
        # per-object state that must not be written into style presets
        return ['n_parts', 'parts', 'manipulators', 'user_defined_path', 'quick_edit', 'draft']
def register():
    """Register every roof-related class and attach Mesh collections.

    Registration order matters: each PropertyGroup must be registered
    before the CollectionProperty that references it is created.
    """
    # bpy.utils.register_class(archipack_roof_material)
    for cls in (archipack_roof_cutter_segment,
                archipack_roof_cutter,
                ARCHIPACK_PT_roof_cutter,
                ARCHIPACK_OT_roof_cutter):
        bpy.utils.register_class(cls)
    Mesh.archipack_roof_cutter = CollectionProperty(type=archipack_roof_cutter)
    for cls in (archipack_roof_segment, archipack_roof):
        bpy.utils.register_class(cls)
    Mesh.archipack_roof = CollectionProperty(type=archipack_roof)
    for cls in (ARCHIPACK_OT_roof_preset_menu,
                ARCHIPACK_PT_roof,
                ARCHIPACK_OT_roof,
                ARCHIPACK_OT_roof_preset,
                ARCHIPACK_OT_roof_from_curve,
                ARCHIPACK_OT_roof_from_wall,
                ARCHIPACK_OT_roof_throttle_update):
        bpy.utils.register_class(cls)
def unregister():
    """Unregister all roof-related classes and drop the Mesh collections."""
    # bpy.utils.unregister_class(archipack_roof_material)
    for cls in (archipack_roof_cutter_segment,
                archipack_roof_cutter,
                ARCHIPACK_PT_roof_cutter,
                ARCHIPACK_OT_roof_cutter):
        bpy.utils.unregister_class(cls)
    del Mesh.archipack_roof_cutter
    for cls in (archipack_roof_segment, archipack_roof):
        bpy.utils.unregister_class(cls)
    del Mesh.archipack_roof
    for cls in (ARCHIPACK_OT_roof_preset_menu,
                ARCHIPACK_PT_roof,
                ARCHIPACK_OT_roof,
                ARCHIPACK_OT_roof_preset,
                ARCHIPACK_OT_roof_from_curve,
                ARCHIPACK_OT_roof_from_wall,
                ARCHIPACK_OT_roof_throttle_update):
        bpy.utils.unregister_class(cls)
| s-leger/archipack | archipack_roof.py | Python | gpl-3.0 | 197,868 |
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from collections import defaultdict
from datetime import datetime
from itertools import combinations
from flask import session
from sqlalchemy.orm import contains_eager
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence
from indico.modules.rb.models.reservations import Reservation
from indico.modules.rb.models.rooms import Room
from indico.modules.rb.util import TempReservationConcurrentOccurrence, TempReservationOccurrence, rb_is_admin
from indico.util.date_time import get_overlap
from indico.util.struct.iterables import group_list
def get_rooms_conflicts(rooms, start_dt, end_dt, repeat_frequency, repeat_interval, blocked_rooms,
                        nonbookable_periods, unbookable_hours, skip_conflicts_with=None, allow_admin=False,
                        skip_past_conflicts=False):
    """Compute booking conflicts for *rooms* over a candidate series.

    A series of candidate occurrences is built from the start/end datetimes
    and repetition settings, then checked against existing occurrences,
    blockings, non-bookable periods and unbookable hours.

    Returns a 3-tuple of dicts keyed by room id: conflicts, pre-booking
    conflicts, and the list of conflicting candidate occurrences.
    """
    rooms_conflicts = defaultdict(set)
    rooms_pre_conflicts = defaultdict(set)
    rooms_conflicting_candidates = defaultdict(set)
    skip_conflicts_with = skip_conflicts_with or []
    candidates = ReservationOccurrence.create_series(start_dt, end_dt, (repeat_frequency, repeat_interval))
    room_ids = [room.id for room in rooms]
    # all valid existing occurrences in these rooms overlapping any candidate
    query = (ReservationOccurrence.query
             .filter(Reservation.room_id.in_(room_ids),
                     ReservationOccurrence.is_valid,
                     ReservationOccurrence.filter_overlap(candidates))
             .join(ReservationOccurrence.reservation)
             .options(ReservationOccurrence.NO_RESERVATION_USER_STRATEGY,
                      contains_eager(ReservationOccurrence.reservation)))
    if skip_conflicts_with:
        query = query.filter(~Reservation.id.in_(skip_conflicts_with))
    if skip_past_conflicts:
        query = query.filter(ReservationOccurrence.start_dt > datetime.now())
    overlapping_occurrences = group_list(query, key=lambda obj: obj.reservation.room.id)
    for room_id, occurrences in overlapping_occurrences.iteritems():
        conflicts = get_room_bookings_conflicts(candidates, occurrences, skip_conflicts_with)
        rooms_conflicts[room_id], rooms_pre_conflicts[room_id], rooms_conflicting_candidates[room_id] = conflicts
    for room_id, occurrences in blocked_rooms.iteritems():
        conflicts, conflicting_candidates = get_room_blockings_conflicts(room_id, candidates, occurrences)
        rooms_conflicts[room_id] |= conflicts
        rooms_conflicting_candidates[room_id] |= conflicting_candidates
    # non-bookable periods and unbookable hours may be overridden by
    # admins / users with override permission on the room
    if not (allow_admin and rb_is_admin(session.user)):
        for room_id, occurrences in nonbookable_periods.iteritems():
            room = Room.get_one(room_id)
            if not room.can_override(session.user, allow_admin=allow_admin):
                conflicts, conflicting_candidates = get_room_nonbookable_periods_conflicts(candidates, occurrences)
                rooms_conflicts[room_id] |= conflicts
                rooms_conflicting_candidates[room_id] |= conflicting_candidates
        for room_id, occurrences in unbookable_hours.iteritems():
            room = Room.get_one(room_id)
            if not room.can_override(session.user, allow_admin=allow_admin):
                conflicts, conflicting_candidates = get_room_unbookable_hours_conflicts(candidates, occurrences)
                rooms_conflicts[room_id] |= conflicts
                rooms_conflicting_candidates[room_id] |= conflicting_candidates
    # normalize candidate sets to lists for the callers
    rooms_conflicting_candidates = defaultdict(list, ((k, list(v)) for k, v in rooms_conflicting_candidates.items()))
    return rooms_conflicts, rooms_pre_conflicts, rooms_conflicting_candidates
def get_room_bookings_conflicts(candidates, occurrences, skip_conflicts_with=frozenset()):
    """Split candidate/booking overlaps into conflicts and pre-conflicts.

    Overlaps with accepted reservations are conflicts; overlaps with
    pending (pre-booked) reservations are pre-conflicts.
    """
    conflicts = set()
    pre_conflicts = set()
    conflicting_candidates = set()
    for candidate in candidates:
        for occurrence in occurrences:
            if occurrence.reservation.id in skip_conflicts_with:
                continue
            if not candidate.overlaps(occurrence):
                continue
            overlap = candidate.get_overlap(occurrence)
            record = TempReservationOccurrence(*overlap, reservation=occurrence.reservation)
            if occurrence.reservation.is_accepted:
                conflicting_candidates.add(candidate)
                conflicts.add(record)
            else:
                pre_conflicts.add(record)
    return conflicts, pre_conflicts, conflicting_candidates
def get_room_blockings_conflicts(room_id, candidates, occurrences):
    """Collect candidates colliding with blockings the user cannot override."""
    conflicts = set()
    conflicting_candidates = set()
    for candidate in candidates:
        candidate_date = candidate.start_dt.date()
        for occurrence in occurrences:
            blocking = occurrence.blocking
            if not (blocking.start_date <= candidate_date <= blocking.end_date):
                continue
            if blocking.can_override(session.user, room=Room.get(room_id)):
                continue
            conflicting_candidates.add(candidate)
            conflicts.add(TempReservationOccurrence(candidate.start_dt, candidate.end_dt, None))
    return conflicts, conflicting_candidates
def get_room_nonbookable_periods_conflicts(candidates, occurrences):
    """Collect candidate overlaps with the room's non-bookable periods."""
    conflicts = set()
    conflicting_candidates = set()
    for candidate in candidates:
        for occurrence in occurrences:
            overlap = get_overlap((candidate.start_dt, candidate.end_dt),
                                  (occurrence.start_dt, occurrence.end_dt))
            # get_overlap yields only Nones when there is no overlap at all
            if overlap.count(None) == len(overlap):
                continue
            conflicting_candidates.add(candidate)
            conflicts.add(TempReservationOccurrence(overlap[0], overlap[1], None))
    return conflicts, conflicting_candidates
def get_room_unbookable_hours_conflicts(candidates, occurrences):
    """Collect candidate overlaps with per-day unbookable hour ranges."""
    conflicts = set()
    conflicting_candidates = set()
    for candidate in candidates:
        for occurrence in occurrences:
            # project the unbookable time-of-day range onto the candidate's day
            period_start = candidate.start_dt.replace(hour=occurrence.start_time.hour,
                                                      minute=occurrence.start_time.minute)
            period_end = candidate.end_dt.replace(hour=occurrence.end_time.hour,
                                                  minute=occurrence.end_time.minute)
            overlap = get_overlap((candidate.start_dt, candidate.end_dt), (period_start, period_end))
            if overlap.count(None) == len(overlap):
                continue
            conflicting_candidates.add(candidate)
            conflicts.add(TempReservationOccurrence(overlap[0], overlap[1], None))
    return conflicts, conflicting_candidates
def get_concurrent_pre_bookings(pre_bookings, skip_conflicts_with=frozenset()):
    """Return overlap records for every overlapping pair of pre-bookings."""
    concurrent = []
    for first, second in combinations(pre_bookings, 2):
        if first.reservation.id in skip_conflicts_with or second.reservation.id in skip_conflicts_with:
            continue
        if first.overlaps(second):
            overlap = first.get_overlap(second)
            concurrent.append(TempReservationConcurrentOccurrence(
                *overlap, reservations=[first.reservation, second.reservation]))
    return concurrent
| mic4ael/indico | indico/modules/rb/operations/conflicts.py | Python | mit | 7,342 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the BackgroundCheck model linking vetting records to members."""
    dependencies = [
        ('mig_main', '0002_initial_split'),
        ('history', '0002_auto_20140918_0318'),
    ]
    operations = [
        migrations.CreateModel(
            name='BackgroundCheck',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date_added', models.DateField(auto_now_add=True)),
                ('check_type', models.CharField(max_length=1, choices=[(b'U', b'UofM Background Check'), (b'B', b'BSA Training'), (b'A', b'AAPS Background Check')])),
                ('member', models.ForeignKey(to='mig_main.MemberProfile')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| tbpmig/mig-website | history/migrations/0003_backgroundcheck.py | Python | apache-2.0 | 907 |
#!/var/www/horizon/.venv/bin/python
# $Id: rst2html.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description)
publish_cmdline(writer_name='html', description=description)
| neumerance/deploy | .venv/bin/rst2html.py | Python | apache-2.0 | 611 |
# Copyright (C) 2014-2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014-2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.base import response
from taiga.base.api import viewsets
from . import permissions
from . import serializers
from . import services
import copy
class FeedbackViewSet(viewsets.ViewSet):
    """Accept feedback entries from the authenticated user and mail them."""
    permission_classes = (permissions.FeedbackPermission,)
    serializer_class = serializers.FeedbackEntrySerializer

    def create(self, request, **kwargs):
        self.check_permissions(request, "create", None)
        # stamp the submitting user's identity onto a copy of the payload
        payload = copy.deepcopy(request.DATA)
        payload.update({"full_name": request.user.get_full_name(),
                        "email": request.user.email})
        serializer = self.serializer_class(data=payload)
        if not serializer.is_valid():
            return response.BadRequest(serializer.errors)
        self.object = serializer.save(force_insert=True)
        # forward some request metadata for debugging context
        extra = {key: request.META.get(key, None)
                 for key in ("HTTP_HOST", "HTTP_REFERER", "HTTP_USER_AGENT")}
        services.send_feedback(self.object, extra, reply_to=[request.user.email])
        return response.Ok(serializer.data)
| bdang2012/taiga-back-casting | taiga/feedback/api.py | Python | agpl-3.0 | 1,982 |
import json

# Raw gazetteer-style place record; parsed into ``data`` at import time.
_RAW_JSON = '''{
    "type": "PPL",
    "names": [
        {
            "name": "Wollongong",
            "language": "en"
        }
    ]
}'''

data = json.loads(_RAW_JSON)
"""
Django settings for {{ cookiecutter.project_name }} project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import dj_database_url
from decouple import config
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', default='',
cast=lambda v: [s.strip() for s in v.split(',')])
# Application definition
INSTALLED_APPS = [
    '{{ cookiecutter.package_name }}',
    '{{ cookiecutter.package_name }}.users',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_rq',
    'django_rq_wrapper',
]
# NOTE(review): WhiteNoise is placed first here; upstream docs recommend
# placing it just below SecurityMiddleware -- confirm this is intentional.
MIDDLEWARE = [
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# custom user model defined in the users app
AUTH_USER_MODEL = 'users.User'
# Enable django-debug-toolbar in development only (opt-out via DEBUG_TOOLBAR)
if DEBUG and config('DEBUG_TOOLBAR', default=True, cast=bool):
    INSTALLED_APPS.extend(['debug_toolbar', ])
    MIDDLEWARE.append(
        'debug_toolbar.middleware.DebugToolbarMiddleware')
    # INTERNAL_IPS must be an iterable of IP addresses.  The previous bare
    # string only worked by accidental substring matching (e.g. '7.0.0.1'
    # would also have matched).
    INTERNAL_IPS = ['127.0.0.1']
ROOT_URLCONF = '{{ cookiecutter.package_name }}.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
            # template debug can be toggled independently of DEBUG
            'debug': config('TEMPLATE_DEBUG', default=DEBUG, cast=bool),
        },
    },
]
WSGI_APPLICATION = '{{ cookiecutter.package_name }}.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# configured from the DATABASE_URL environment variable (12-factor style)
DATABASES = {
    'default': dj_database_url.config(conn_max_age=600),
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'public', 'static')
STATICFILES_STORAGE = '{{ cookiecutter.package_name }}.' \
    'storage.DebugErroringCompressedManifestStaticFilesStorage'
WHITENOISE_ROOT = os.path.join(BASE_DIR, 'public')
# Media
MEDIA_URL = '/media/'
# AWS Settings for Storages
AWS_ACCESS_KEY_ID = config('AWS_ACCESS_KEY_ID', default='')
AWS_SECRET_ACCESS_KEY = config('AWS_SECRET_ACCESS_KEY', default='')
AWS_STORAGE_BUCKET_NAME = config('AWS_STORAGE_BUCKET_NAME', default='')
# fall back to local media storage unless all S3 credentials are present
if not (AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY and AWS_STORAGE_BUCKET_NAME):
    MEDIA_ROOT = os.path.join(BASE_DIR, 'public', 'media')
else:
    INSTALLED_APPS.extend(['storages', ])
    if config('AWS_S3_CUSTOM_DOMAIN', default=None):
        AWS_S3_URL_PROTOCOL = 'https:'
        AWS_S3_CUSTOM_DOMAIN = config('AWS_S3_CUSTOM_DOMAIN', default='')
    # private objects served through signed, expiring query-string URLs
    AWS_DEFAULT_ACL = 'private'
    AWS_QUERYSTRING_AUTH = True
    AWS_QUERYSTRING_EXPIRE = 60 * 60 * 24
    AWS_S3_FILE_OVERWRITE = False
    AWS_LOCATION = 'media'
    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# SSL
# redirect all HTTP to HTTPS (and mark cookies secure) unless disabled
SECURE_SSL_REDIRECT = config('SECURE_SSL_REDIRECT', default=True, cast=bool)
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SESSION_COOKIE_SECURE = SECURE_SSL_REDIRECT
CSRF_COOKIE_SECURE = SECURE_SSL_REDIRECT
# levels: https://docs.python.org/3/library/logging.html#logging-levels
DJANGO_LOG_LEVEL = config('DJANGO_LOG_LEVEL', default='ERROR').upper()
DJANGO_LOG_FORMAT = config('DJANGO_LOG_FORMAT', default='simple').lower()
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '[%(asctime)s] %(levelname)s "%(message)s"',
'datefmt': '%d/%b/%Y %H:%M:%S',
},
'verbose': {
'format': '[%(asctime)s] %(levelname)s %(name)s.%(funcName)s:%(lineno)d "%(message)s"', # noqa
'datefmt': '%d/%b/%Y %H:%M:%S',
}
},
'datefmt': '%d/%b/%Y %H:%M:%S',
'handlers': {
'console': {
'level': DJANGO_LOG_LEVEL,
'class': 'logging.StreamHandler',
'formatter': DJANGO_LOG_FORMAT,
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'level': 'DEBUG',
},
'{{ cookiecutter.package_name }}': {
'handlers': ['console'],
'level': 'DEBUG',
},
},
}
# Redis
# sessions are stored in the cache (Redis) rather than the database
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': config('REDIS_URL', default='redis://localhost:6379/0'),
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
        }
    },
}
# rq
# three priority queues, all backed by the same Redis instance
RQ_QUEUES = {
    'default': {
        'URL': config('REDIS_URL', default='redis://localhost:6379/0'),
    },
    'high': {
        'URL': config('REDIS_URL', default='redis://localhost:6379/0'),
    },
    'low': {
        'URL': config('REDIS_URL', default='redis://localhost:6379/0'),
    },
}
RQ_SHOW_ADMIN_LINK = True
# Sentry
# error reporting is active only when SENTRY_DSN is configured
SENTRY_DSN = config('SENTRY_DSN', default=None)
if SENTRY_DSN:
    INSTALLED_APPS.extend([
        'raven.contrib.django.raven_compat',
    ])
    RAVEN_CONFIG = {
        'dsn': SENTRY_DSN,
    }
def fadein(clip, duration):
    """ Makes the clip progressively appear from black over its first
    ``duration`` seconds: each frame is scaled by ``min(t / duration, 1)``.
    (The previous docstring incorrectly described a fade *to* black, which
    is ``fadeout``.) For more advanced fading, see
    ``moviepy.video.composition.crossfadein`` """
    return clip.fl(lambda gf, t: min(1.0 * t / duration, 1) * gf(t))
| DevinGeo/moviepy | moviepy/video/fx/fadein.py | Python | mit | 323 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, line-too-long, unused-variable, too-many-locals
"""L2 normalize in python"""
import numpy as np
def l2_normalize_python(a_np, eps, axis=None):
    """L2 normalize operator in NCHW layout.
    Parameters
    ----------
    a_np : numpy.ndarray
        4-D with shape [batch, in_channel, in_height, in_width]
    eps : float
        epsilon constant value, floor for the squared norm
    axis : list of int
        axis over the normalization applied
    Returns
    -------
    l2_normalize_out : np.ndarray
        4-D with shape [batch, out_channel, out_height, out_width]
    """
    squared = a_np * a_np
    norm_sq = np.sum(squared, axis=axis, keepdims=True)
    # clamp by eps to avoid dividing by (near-)zero norms
    denom = np.sqrt(np.maximum(np.broadcast_to(norm_sq, a_np.shape), eps))
    return np.divide(a_np, denom)
| Huyuwei/tvm | topi/python/topi/testing/l2_normalize_python.py | Python | apache-2.0 | 1,619 |
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``size`` property of
    ``densitymapbox.hoverlabel.font``."""

    def __init__(
        self, plotly_name="size", parent_name="densitymapbox.hoverlabel.font", **kwargs
    ):
        # Defaults below may be overridden by explicit keyword arguments.
        options = {
            "array_ok": kwargs.pop("array_ok", True),
            "edit_type": kwargs.pop("edit_type", "none"),
            "min": kwargs.pop("min", 1),
            "role": kwargs.pop("role", "style"),
        }
        options.update(kwargs)
        super(SizeValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **options
        )
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``main_language`` flag to LanguagePage."""
    dependencies = [
        ('core', '0022_add_help_text_for_metadata_tags'),
    ]
    operations = [
        migrations.AddField(
            model_name='languagepage',
            name='main_language',
            field=models.BooleanField(default=False, help_text='The main language of the site'),
        ),
    ]
# Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: tmoroz.org
# 2010-2011 Large Blue
# Fergus Doyle: fergus.doyle@largeblue.com
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from zope.interface import directlyProvides
from zope.interface import implements
from repoze.bfg.security import effective_principals
from repoze.bfg.traversal import model_path
from opencore.models.interfaces import ICatalogSearch
from opencore.models.interfaces import IGroupSearchFactory
from opencore.models.interfaces import IGroupSearch
from opencore.models.interfaces import IPeople
from opencore.models.interfaces import IPosts
from opencore.models.interfaces import IFiles
from opencore.models.interfaces import IPages
from opencore.models.interfaces import IOthers
from opencore.views.batch import get_catalog_batch_grid
def groupsearchfactory(unwrapped):
    """Decorator: mark *unwrapped* as providing IGroupSearchFactory."""
    directlyProvides(unwrapped, IGroupSearchFactory)
    return unwrapped
@groupsearchfactory
def people_group_search(context, request, term):
search = GroupSearch(context, request, [IPeople], term)
return search
@groupsearchfactory
def pages_group_search(context, request, term):
search = GroupSearch(context, request, [IPages], term)
return search
@groupsearchfactory
def posts_group_search(context, request, term):
    """Build a GroupSearch over post (IPosts) content for *term*."""
    return GroupSearch(context, request, [IPosts], term)
@groupsearchfactory
def files_group_search(context, request, term):
    """Build a GroupSearch over file (IFiles) content for *term*."""
    return GroupSearch(context, request, [IFiles], term)
@groupsearchfactory
def others_group_search(context, request, term):
    """Build a GroupSearch over remaining (IOthers) content for *term*."""
    return GroupSearch(context, request, [IOthers], term)
class GroupSearch:
    """Global catalog search restricted to a set of content interfaces.

    Filters by full-text *term*, the given *interfaces* (OR-combined) and
    the request's effective principals; no path filtering is applied.
    """
    implements(IGroupSearch)

    def __init__(self, context, request, interfaces, term, limit=5):
        self.context = context
        self.request = request
        self.interfaces = interfaces
        self.term = term
        self.limit = limit

    def __call__(self):
        """Run the search capped at ``self.limit`` hits.

        Returns the catalog's ``(num, docids, resolver)`` triple.
        """
        criteria = dict(self._makeCriteria(), limit=self.limit)
        searcher = ICatalogSearch(self.context)
        return searcher(**criteria)

    def get_batch(self):
        """Return a batched ('grid') view over the full, uncapped result set."""
        return get_catalog_batch_grid(
            self.context, self.request, **self._makeCriteria())

    def _makeCriteria(self):
        # This is always a global search; it does no path filtering --
        # only text relevance, interface and ACL ('allowed') constraints.
        return {
            'sort_index': 'texts',
            'texts': self.term,
            'interfaces': {'query': self.interfaces, 'operator': 'or'},
            'allowed': {'query': effective_principals(self.request),
                        'operator': 'or'},
        }
| damilare/opencore | opencore/utilities/groupsearch.py | Python | gpl-2.0 | 3,509 |
from BidirectionalCategoryEnum import BidirectionalCategoryEnum
from CharacterDecompositionMappingEnum import CharacterDecompositionMappingEnum
from GeneralCategoryEnum import GeneralCategoryEnum
class CPPGenerator:
    """Emits ``<output>.cpp``: C++ lookup functions over static
    ``std::array`` tables for Unicode character properties.

    ``output`` doubles as the generated file's base name and the C++
    class name that scopes every emitted symbol. ``datas`` is indexed
    positionally in ``__writeDatas`` (0=bidirectional, 1=general
    category, 2=combining class, 3=lowercase, 4=mirrored, 5=titlecase,
    6=uppercase); each entry is a sequence of (codepoint, value) pairs.
    ``specialCasing`` is ``None`` or a (lower, title, upper) triple of
    multi-codepoint mapping tables.
    """

    def __init__(self, output, datas, specialCasing):
        self.__output = output
        self.__datas = datas
        self.__specialCasing = specialCasing

    def generate(self):
        """Write the complete .cpp file: includes, functions, then tables."""
        with open(self.__output + ".cpp", "w") as ofile:
            self.__writeIncludes(ofile)
            self.__writeFunctions(ofile)
            self.__writeDatas(ofile)

    def __writeIncludes(self, ofile):
        # The generated file includes its own header, plus <algorithm>
        # (binary searches) and <string> (std::to_string in the throws).
        ofile.write("#include <" + self.__output + ".hpp" + ">\n\n")
        ofile.write("#include <algorithm>\n")
        ofile.write("#include <string>\n\n")

    def __writeFunctions(self, ofile):
        """Emit one lookup-function definition per Unicode property."""
        self.__writeBidiCategory(ofile)
        self.__writeCombining(ofile)
        #self.__writeDecomposition(ofile)
        self.__writeCategory(ofile)
        self.__writeLower(ofile)
        self.__writeMirror(ofile)
        self.__writeTitle(ofile)
        self.__writeUpper(ofile)
        # Special-casing functions are only emitted when tables exist.
        if self.__specialCasing is not None:
            self.__writeSpecialLower(ofile)
            self.__writeSpecialTitle(ofile)
            self.__writeSpecialUpper(ofile)

    # Each __writeX helper prints a C++ function signature, then
    # delegates the body to one of the generic writers below.

    def __writeBidiCategory(self, ofile):
        ofile.write(self.__output + "::BidirectionalCategory " + self.__output + "::bidirectional_category(Unicode character)\n")
        self.__writeFunctionRanges(ofile, "m_bidirectionals", "BidirectionalCategory")

    def __writeCombining(self, ofile):
        ofile.write("unsigned char " + self.__output + "::canonical_combining_classes(Unicode character)\n")
        self.__writeFunctionRanges(ofile, "m_combinings", "unsigned char")

    def __writeDecomposition(self, ofile):
        # Currently disabled in __writeFunctions (no matching table is
        # emitted in __writeDatas).
        ofile.write(self.__output + "::CharacterDecompositionMapping " + self.__output + "::character_decomposition_mapping(Unicode character)\n")
        self.__writeFunctionRanges(ofile, "m_decompositions", "CharacterDecompositionMapping")

    def __writeCategory(self, ofile):
        ofile.write(self.__output + "::GeneralCategory " + self.__output + "::general_category(Unicode character)\n")
        self.__writeFunctionRanges(ofile, "m_categories", "GeneralCategory")

    def __writeLower(self, ofile):
        ofile.write(self.__output + "::Unicode " + self.__output + "::lowercase_mapping(Unicode character)\n")
        self.__writeFunctionFind(ofile, "m_lowers", "Unicode")

    def __writeMirror(self, ofile):
        ofile.write("bool " + self.__output + "::mirrored(Unicode character)\n")
        self.__writeFunctionRanges(ofile, "m_mirrors", "bool")

    def __writeTitle(self, ofile):
        ofile.write(self.__output + "::Unicode " + self.__output + "::titlecase_mapping(Unicode character)\n")
        self.__writeFunctionFind(ofile, "m_titles", "Unicode")

    def __writeUpper(self, ofile):
        ofile.write(self.__output + "::Unicode " + self.__output + "::uppercase_mapping(Unicode character)\n")
        self.__writeFunctionFind(ofile, "m_uppers", "Unicode")

    def __writeSpecialLower(self, ofile):
        ofile.write("std::array<" + self.__output + "::Unicode, 3> " + self.__output + "::special_lowercase_mapping(Unicode character)\n")
        self.__writeFunctionSpecialFind(ofile, "m_specialLowers", "std::array<Unicode, 3>")

    def __writeSpecialTitle(self, ofile):
        ofile.write("std::array<" + self.__output + "::Unicode, 3> " + self.__output + "::special_titlecase_mapping(Unicode character)\n")
        self.__writeFunctionSpecialFind(ofile, "m_specialTitles", "std::array<Unicode, 3>")

    def __writeSpecialUpper(self, ofile):
        ofile.write("std::array<" + self.__output + "::Unicode, 3> " + self.__output + "::special_uppercase_mapping(Unicode character)\n")
        self.__writeFunctionSpecialFind(ofile, "m_specialUppers", "std::array<Unicode, 3>")

    def __writeFunctionRanges(self, ofile, container, type):
        # Range-table body: std::upper_bound picks the first entry whose
        # start codepoint exceeds `character`; misses throw.
        ofile.write("{\n")
        ofile.write("\tauto it = std::upper_bound(" + container + ".begin(), " + container + ".end(), character, [](Unicode characterIt, const std::pair<Unicode, " + type + ">& itPair) {\n")
        ofile.write("\t\treturn itPair.first > characterIt;\n")
        ofile.write("\t});\n\n")
        ofile.write("\tif (it == " + container + ".end())\n")
        ofile.write("\t\tthrow std::runtime_error(\"Character \" + std::to_string(character) + \" is not handled.\");\n")
        ofile.write("\telse\n")
        ofile.write("\t\treturn it->second;\n")
        ofile.write("}\n\n")

    def __writeFunctionFind(self, ofile, container, type):
        # Exact-match body: std::lower_bound; an inexact hit returns the
        # input character unchanged (identity mapping), past-the-end throws.
        ofile.write("{\n")
        ofile.write("\tauto it = std::lower_bound(" + container + ".begin(), " + container + ".end(), character, [](const std::pair<Unicode, " + type + ">& itPair, Unicode characterIt) {\n")
        ofile.write("\t\treturn itPair.first < characterIt;\n")
        ofile.write("\t});\n\n")
        ofile.write("\tif (it == " + container + ".end())\n")
        ofile.write("\t\tthrow std::runtime_error(\"Character \" + std::to_string(character) + \" is not handled.\");\n")
        ofile.write("\telse\n")
        ofile.write("\t{\n")
        ofile.write("\t\tif (it->first != character)\n")
        ofile.write("\t\t\treturn character;\n")
        ofile.write("\t\telse\n")
        ofile.write("\t\t\treturn it->second;\n")
        ofile.write("\t}\n")
        ofile.write("}\n\n")

    def __writeFunctionSpecialFind(self, ofile, container, type):
        # Like __writeFunctionFind, but misses return a {'\0'} array
        # instead of throwing (special casings are sparse).
        ofile.write("{\n")
        ofile.write("\tauto it = std::lower_bound(" + container + ".begin(), " + container + ".end(), character, [](const std::pair<Unicode, " + type + ">& itPair, Unicode characterIt) {\n")
        ofile.write("\t\treturn itPair.first < characterIt;\n")
        ofile.write("\t});\n\n")
        ofile.write("\tif (it == " + container + ".end())\n")
        ofile.write("\t\treturn { '\\0' };\n")
        ofile.write("\telse\n")
        ofile.write("\t{\n")
        ofile.write("\t\tif (it->first != character)\n")
        ofile.write("\t\t\treturn { '\\0' };\n")
        ofile.write("\t\telse\n")
        ofile.write("\t\t\treturn it->second;\n")
        ofile.write("\t}\n")
        ofile.write("}\n\n")

    def __writeDatas(self, ofile):
        """Emit the static std::array table definitions, one per property."""
        self.__writeStatic(ofile, self.__datas[0], "m_bidirectionals", "BidirectionalCategory", True, True)
        self.__writeStatic(ofile, self.__datas[1], "m_categories", "GeneralCategory", True, True)
        self.__writeStatic(ofile, self.__datas[2], "m_combinings", "unsigned char")
        self.__writeStatic(ofile, self.__datas[3], "m_lowers", "Unicode", True)
        self.__writeStatic(ofile, self.__datas[4], "m_mirrors", "bool")
        self.__writeStatic(ofile, self.__datas[5], "m_titles", "Unicode", True)
        self.__writeStatic(ofile, self.__datas[6], "m_uppers", "Unicode", True)
        if self.__specialCasing is not None:
            self.__writeStatic(ofile, self.__specialCasing[0], "m_specialLowers", "Unicode", True, True, True)
            self.__writeStatic(ofile, self.__specialCasing[1], "m_specialTitles", "Unicode", True, True, True)
            self.__writeStatic(ofile, self.__specialCasing[2], "m_specialUppers", "Unicode", True, True, True)

    def __writeStatic(self, ofile, data, container, type, scopedType = False, scopedEnum = False, multi = False):
        """Emit one ``std::array<std::pair<Unicode, T>, N>`` definition.

        ``scopedType`` qualifies T with the output class name;
        ``scopedEnum`` prefixes enum values with ``T::``; ``multi``
        switches to array-of-3 values (special casings).
        """
        # Header line: element type depends on scoping / multi flags.
        if scopedType and not multi:
            ofile.write("std::array<std::pair<" + self.__output + "::Unicode, " + self.__output + "::" + type + ">, " + str(len(data)) + "> " + self.__output + "::" + container + " { {\n")
        elif scopedType:
            ofile.write("std::array<std::pair<" + self.__output + "::Unicode, std::array<" + self.__output + "::" + type + ", 3> >, " + str(len(data)) + "> " + self.__output + "::" + container + " { {\n")
        else:
            ofile.write("std::array<std::pair<" + self.__output + "::Unicode, " + type + ">, " + str(len(data)) + "> " + self.__output + "::" + container + " { {\n")
        if type != "Unicode":
            if type == "bool":
                # Python repr 'True'/'False' -> C++ 'true'/'false'.
                for tupleData in data:
                    ofile.write("\t{" + str(tupleData[0]) + ", " + str(tupleData[1]).lower() + "},\n")
            else:
                if not scopedEnum:
                    for tupleData in data:
                        ofile.write("\t{" + str(tupleData[0]) + ", " + str(tupleData[1]) + "},\n")
                else:
                    for tupleData in data:
                        ofile.write("\t{" + str(tupleData[0]) + ", " + type + "::" + str(tupleData[1]) + "},\n")
        else:
            if not multi:
                # NOTE(review): only the mapped value gets an 0x prefix
                # here, while the multi branch below prefixes the key as
                # well -- presumably the inputs differ (decimal keys vs
                # hex strings); confirm against the table producers.
                for tupleData in data:
                    ofile.write("\t{" + str(tupleData[0]) + ", 0x" + str(tupleData[1]) + "},\n")
            else:
                for tupleData in data:
                    ofile.write("\t{ 0x" + str(tupleData[0]) + ", {")
                    for character in tupleData[1]:
                        ofile.write(" 0x" + str(character) + ",")
                    ofile.write("} },\n")
        ofile.write("} };\n\n")
| Gawaboumga/String | U8String/UnicodeDataGenerator/CPPGenerator.py | Python | mit | 9,116 |
import logging
import string
import time
from django import forms
from django.contrib.auth.decorators import login_required
from django.core.files.base import ContentFile
from django.http import HttpResponse, Http404
from django.shortcuts import render_to_response, redirect
from django.template import Context, loader
from django.template.context import RequestContext
from cloud.models.metric import Metric, METRIC_SCOPES, METRIC_STATES, RETURN_DATA_TYPE
from cloud.helpers import session_flash, paginate
# Configure logging for the module name
logger = logging.getLogger(__name__)
# Section-wide chrome shared by every view in this module.
# NOTE(review): this module-level dict is mutated in place via
# view_vars.update(...) by each view, so state leaks across requests
# (and across threads under a threaded server); consider building a
# fresh dict per request.
view_vars = {
    'active_menu': 'Programs',
    'active_section': 'Metrics',
}
@login_required
def index(request):
    """List all metrics, paginated, rendered with metrics-index.html."""
    template = loader.get_template('metrics-index.html')
    metric_list = paginate.paginate(Metric.objects.all(), request)
    # Shared section chrome lives in the module-level view_vars dict.
    # (No `global` statement needed: .update() mutates, never rebinds.)
    view_vars.update({
        'active_item': None,
        'title': 'Metrics List',
        'actions': [{'name': 'New Metric',
                     'url': "/Aurora/cloud/metrics/new/",
                     'image': 'plus'}],
    })
    context = Context({
        'metric_list': metric_list,
        'paginate_list': metric_list,
        'view_vars': view_vars,
        'request': request,
        'flash': session_flash.get_flash(request),
    })
    return HttpResponse(template.render(context))
@login_required
def detail(request, metric_id):
    """Show one metric's details; raise Http404 for an unknown id."""
    try:
        metric = Metric.objects.get(pk=metric_id)
    except Metric.DoesNotExist:
        raise Http404
    view_vars.update({
        'active_item': metric,
        'title': 'Metric Details',
        'actions': [{'name': 'Back to List',
                     'url': '/Aurora/cloud/metrics/',
                     'image': 'chevron-left'}],
    })
    payload = {'metric': metric, 'view_vars': view_vars, 'request': request}
    return render_to_response('metrics-detail.html', payload)
# Form for new Metric creation.
class MetricForm(forms.Form):
    """Validates a new Metric's fields and derives a safe source filename.

    ``clean`` adds a ``filename`` key to ``cleaned_data`` built from the
    metric name (spaces -> underscores, unsafe characters dropped,
    ``.py`` appended), or marks ``name`` invalid when nothing usable
    remains.
    """
    action = "/Aurora/cloud/metrics/new/"  # form POST target
    name = forms.CharField(max_length=200)
    description = forms.CharField(widget=forms.Textarea)
    returns = forms.ChoiceField(choices=RETURN_DATA_TYPE)
    scope = forms.ChoiceField(choices=METRIC_SCOPES)
    state = forms.ChoiceField(choices=METRIC_STATES)
    file = forms.CharField(widget=forms.Textarea)  # pasted Python source

    def clean(self):
        # Filesystem-safe alphabet for the generated module filename.
        # (Fixed: the original bound an unused duplicate alias via
        # `valid = valid_chars = ...`; the manual filter loop is now a
        # generator fed to str.join.)
        valid = "-_.%s%s" % (string.ascii_letters, string.digits)
        filename = self.cleaned_data['name'].replace(" ", "_")
        final_filename = "".join(c for c in filename if c in valid)
        if final_filename:
            self.cleaned_data["filename"] = final_filename + ".py"
        else:
            # Nothing survived the filter: reject the name field.
            msg = "Invalid name"
            self._errors["name"] = self.error_class([msg])
            del self.cleaned_data["name"]
        return self.cleaned_data
@login_required
def new(request):
    """Create a new Metric from a submitted form; GET shows a blank form."""
    if request.method == 'POST':
        # Bound form: validate the submitted data.
        form = MetricForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            metr = Metric()
            metr.name = data['name']
            metr.description = data['description']
            metr.returns = data['returns']
            metr.scope = data['scope']
            metr.state = data['state']
            # Persist the pasted source under the filename derived by
            # MetricForm.clean, then save the model row.
            metr.file.save(data['filename'], ContentFile(data['file']))
            metr.save()
            session_flash.set_flash(request, "New Metric successfully created")
            return redirect('cloud-metrics-index')
        # Invalid submission: fall through and re-render the bound form.
    else:
        form = MetricForm()
    view_vars.update({
        'active_item': None,
        'title': 'New Metric',
        'actions': [{'name': 'Back to List',
                     'url': '/Aurora/cloud/metrics/',
                     'image': 'chevron-left'}],
    })
    ctx = RequestContext(request, {
        'form': form,
        'view_vars': view_vars,
        'request': request,
        'flash': session_flash.get_flash(request),
    })
    return render_to_response('base-form.html', ctx)
@login_required
def delete(request, metric_id):
    """Delete a metric and its stored source file; Http404 on unknown id."""
    try:
        metr = Metric.objects.get(pk=metric_id)
    except Metric.DoesNotExist:
        raise Http404
    # Delete the saved file first; the DB row is removed either way.
    try:
        metr.file.delete()
        session_flash.set_flash(request, "Metric %s was successfully deleted!" % str(metr))
        logger.debug("Metric %s was successfully deleted!" % str(metr))
    except Exception as e:
        # BUG FIX: this handler was a bare `except:` that referenced an
        # undefined name `e`, so any file-deletion failure raised a
        # NameError instead of flashing the warning below.
        session_flash.set_flash(request, "Could not delete file %s of metric %s: %s" % (metr.file, str(metr), str(e)), "warning")
        logger.warning("Could not delete file %s of metric %s: %s", metr.file, str(metr), str(e))
    metr.delete()
    return redirect('cloud-metrics-index')
# Remote call to metrics: load the named metric's stored module, run its
# collect() with the request's GET parameters, and return the raw result.
# NOTE: intentionally not @login_required -- presumably a machine-facing
# endpoint; confirm that is the intended exposure.
def web_services(request, metric_name):
    # Look the metric up by name (not pk); unknown names are a 404.
    try:
        metric = Metric.objects.get(name=metric_name)
    except Metric.DoesNotExist:
        raise Http404
    # File name without the '.py' and with '/' replaced by '.', turning
    # the stored file path into a dotted module path.
    file_path = metric.file.name[0:-3].replace("/", ".")
    # Last dotted component doubles as the class name: the metric module
    # is expected to define a class named exactly like its file.
    metric_classname = file_path.split(".")[-1]
    metric_path = 'cloud.' + file_path
    try:
        # fromlist forces __import__ to return the leaf module itself.
        metric_module = __import__(metric_path, fromlist=[metric_classname])
        metric_class = getattr(metric_module, metric_classname)
    except (ImportError, NotImplementedError) as e:
        logger.error("Problems loading metric: " + str(e))
        return HttpResponse("Problems loading metric: " + str(e) + " - " + metric_path + " - " + metric_classname)
    except AttributeError as e:
        logger.error("Must implement main class: " + str(e))
        return HttpResponse("Must implement main class: " + str(e))
    try:
        # Rebinds `metric` from the model instance to the metric object.
        metric = metric_class()
        # Will record collecting time
        t0 = time.time()
        # Forward all GET query parameters as keyword arguments.
        kargs = {}
        for param in request.GET:
            kargs[param] = request.GET.get(param)
        if len(kargs) > 0:
            result = metric.collect(**kargs)
        else:
            result = metric.collect()
        collecting_time = time.time() - t0
        logger.info("Metric %s successfully collected in %d seconds" % (metric_classname, round(collecting_time, 2)))
    except TypeError as e:
        # collect() signature did not accept the forwarded parameters.
        logger.error("Wrong parameters: %s" % str(e))
        return HttpResponse("Wrong parameters: %s" % str(e))
    except metric.MetricException as e:
        # MetricException is looked up on the metric instance itself.
        logger.error("Problems collecting metric: %s" % str(e))
        return HttpResponse("Problems collecting metric: %s" % str(e))
    return HttpResponse(result)
| ComputerNetworks-UFRGS/Aurora | cloud/views/metrics.py | Python | gpl-2.0 | 6,878 |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.common.execution_config import ExecutionConfig
from pyflink.common.job_execution_result import JobExecutionResult
from pyflink.common.restart_strategy import RestartStrategies, RestartStrategyConfiguration
from pyflink.java_gateway import get_gateway
from pyflink.util.utils import load_java_class
class ExecutionEnvironment(object):
    """
    The ExecutionEnvironment is the context in which a program is executed.

    The environment provides methods to control the job execution (such as setting the parallelism)
    and to interact with the outside world (data access).
    """

    def __init__(self, j_execution_environment):
        # Py4J handle to the Java-side ExecutionEnvironment; every method
        # below is a thin delegation through this gateway object.
        self._j_execution_environment = j_execution_environment

    def get_parallelism(self) -> int:
        """
        Gets the parallelism with which operation are executed by default.

        :return: The parallelism.
        """
        return self._j_execution_environment.getParallelism()

    def set_parallelism(self, parallelism: int):
        """
        Sets the parallelism for operations executed through this environment.
        Setting a parallelism of x here will cause all operators to run with
        x parallel instances.

        :param parallelism: The parallelism.
        """
        self._j_execution_environment.setParallelism(parallelism)

    def get_default_local_parallelism(self) -> int:
        """
        Gets the default parallelism that will be used for the local execution environment.

        :return: The parallelism.
        """
        return self._j_execution_environment.getDefaultLocalParallelism()

    def set_default_local_parallelism(self, parallelism: int):
        """
        Sets the default parallelism that will be used for the local execution environment.

        :param parallelism: The parallelism.
        """
        self._j_execution_environment.setDefaultLocalParallelism(parallelism)

    def get_config(self) -> ExecutionConfig:
        """
        Gets the config object that defines execution parameters.

        :return: An :class:`ExecutionConfig` object, the environment's execution configuration.
        """
        # Wrap the Java ExecutionConfig in its Python counterpart.
        return ExecutionConfig(self._j_execution_environment.getConfig())

    def set_restart_strategy(self, restart_strategy_configuration: RestartStrategyConfiguration):
        """
        Sets the restart strategy configuration. The configuration specifies which restart strategy
        will be used for the execution graph in case of a restart.

        Example:
        ::

            >>> env.set_restart_strategy(RestartStrategies.no_restart())

        :param restart_strategy_configuration: Restart strategy configuration to be set.
        """
        # Unwrap the Python wrapper and hand the Java object to the JVM.
        self._j_execution_environment.setRestartStrategy(
            restart_strategy_configuration._j_restart_strategy_configuration)

    def get_restart_strategy(self) -> RestartStrategyConfiguration:
        """
        Returns the specified restart strategy configuration.

        :return: The restart strategy configuration to be used.
        """
        return RestartStrategies._from_j_restart_strategy(
            self._j_execution_environment.getRestartStrategy())

    def add_default_kryo_serializer(self, type_class_name: str, serializer_class_name: str):
        """
        Adds a new Kryo default serializer to the Runtime.

        Example:
        ::

            >>> env.add_default_kryo_serializer("com.aaa.bbb.TypeClass", "com.aaa.bbb.Serializer")

        :param type_class_name: The full-qualified java class name of the types serialized with the
                                given serializer.
        :param serializer_class_name: The full-qualified java class name of the serializer to use.
        """
        # Both names are resolved to Java Class objects inside the JVM.
        type_clz = load_java_class(type_class_name)
        j_serializer_clz = load_java_class(serializer_class_name)
        self._j_execution_environment.addDefaultKryoSerializer(type_clz, j_serializer_clz)

    def register_type_with_kryo_serializer(self, type_class_name: str, serializer_class_name: str):
        """
        Registers the given Serializer via its class as a serializer for the given type at the
        KryoSerializer.

        Example:
        ::

            >>> env.register_type_with_kryo_serializer("com.aaa.bbb.TypeClass",
            ...                                        "com.aaa.bbb.Serializer")

        :param type_class_name: The full-qualified java class name of the types serialized with
                                the given serializer.
        :param serializer_class_name: The full-qualified java class name of the serializer to use.
        """
        type_clz = load_java_class(type_class_name)
        j_serializer_clz = load_java_class(serializer_class_name)
        self._j_execution_environment.registerTypeWithKryoSerializer(type_clz, j_serializer_clz)

    def register_type(self, type_class_name: str):
        """
        Registers the given type with the serialization stack. If the type is eventually
        serialized as a POJO, then the type is registered with the POJO serializer. If the
        type ends up being serialized with Kryo, then it will be registered at Kryo to make
        sure that only tags are written.

        Example:
        ::

            >>> env.register_type("com.aaa.bbb.TypeClass")

        :param type_class_name: The full-qualified java class name of the type to register.
        """
        type_clz = load_java_class(type_class_name)
        self._j_execution_environment.registerType(type_clz)

    def execute(self, job_name: str = None) -> JobExecutionResult:
        """
        Triggers the program execution. The environment will execute all parts of the program that
        have resulted in a "sink" operation.

        The program execution will be logged and displayed with the given job name.

        :param job_name: Desired name of the job, optional.
        :return: The result of the job execution, containing elapsed time and accumulators.
        """
        # A None job name selects the no-argument Java overload.
        if job_name is None:
            return JobExecutionResult(self._j_execution_environment.execute())
        else:
            return JobExecutionResult(self._j_execution_environment.execute(job_name))

    def get_execution_plan(self) -> str:
        """
        Creates the plan with which the system will execute the program, and returns it as
        a String using a JSON representation of the execution data flow graph.

        Note that this needs to be called, before the plan is executed.

        If the compiler could not be instantiated, or the master could not
        be contacted to retrieve information relevant to the execution planning,
        an exception will be thrown.

        :return: The execution plan of the program, as a JSON String.
        """
        return self._j_execution_environment.getExecutionPlan()

    @staticmethod
    def get_execution_environment() -> 'ExecutionEnvironment':
        """
        Creates an execution environment that represents the context in which the program is
        currently executed. If the program is invoked standalone, this method returns a local
        execution environment. If the program is invoked from within the command line client to be
        submitted to a cluster, this method returns the execution environment of this cluster.

        :return: The :class:`ExecutionEnvironment` of the context in which the program is executed.
        """
        gateway = get_gateway()
        j_execution_environment = gateway.jvm.org.apache.flink.api.java.ExecutionEnvironment\
            .getExecutionEnvironment()
        return ExecutionEnvironment(j_execution_environment)
| aljoscha/flink | flink-python/pyflink/dataset/execution_environment.py | Python | apache-2.0 | 8,613 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# isprime.py
#
# Author: Billy Wilson Arante
# Created: 2016/06/16 PHT
# Modified: 2016/10/01 EDT (America/New York)
from sys import argv
def isprime(x):
    """Checks if x is prime number

    Returns True if x is a prime number, otherwise False. Numbers <= 1
    (including negatives) are not prime.

    Trial division only needs to test candidate divisors up to sqrt(x):
    any composite x has a factor no larger than its square root. The
    original scanned every n in 2..x-2, which is O(x) instead of
    O(sqrt(x)); results are identical.
    """
    if x <= 1:
        return False
    for n in range(2, int(x ** 0.5) + 1):
        if x % n == 0:
            return False
    return True
def main():
    """Main: expects argv == [script, number]; prints primality of number.

    Uses the parenthesized print form, which behaves identically for a
    single argument on Python 2 and is valid syntax on Python 3 (the
    original `print isprime(...)` statement is a SyntaxError on Py3).
    """
    filename, number = argv
    print(isprime(int(number)))


if __name__ == "__main__":
    main()
| arantebillywilson/python-snippets | py2/cool-things/isprime.py | Python | mit | 565 |
# -*- coding: utf-8 -*-
"""
This module offers a parser for ISO-8601 strings
It is intended to support all valid date, time and datetime formats per the
ISO-8601 specification.
..versionadded:: 2.7.0
"""
from datetime import datetime, timedelta, time, date
import calendar
from dateutil import tz
from functools import wraps
import re
import six
__all__ = ["isoparse", "isoparser"]
def _takes_ascii(f):
    """Decorator normalizing a method's first argument to ASCII bytes.

    Stream arguments are drained via ``.read()``; text (unicode) input
    is encoded to ASCII, raising ValueError (chained from the
    UnicodeEncodeError) when non-ASCII characters are present.
    """
    @wraps(f)
    def func(self, str_in, *args, **kwargs):
        # If it's a stream, read the whole thing up front.
        reader = getattr(str_in, 'read', None)
        if reader is not None:
            str_in = reader()

        # ISO-8601 only covers ASCII, so text can be encoded losslessly
        # (ASCII bytes are identical in UTF-8).
        if isinstance(str_in, six.text_type):
            try:
                str_in = str_in.encode('ascii')
            except UnicodeEncodeError as e:
                msg = 'ISO-8601 strings should contain only ASCII characters'
                six.raise_from(ValueError(msg), e)

        return f(self, str_in, *args, **kwargs)

    return func
class isoparser(object):
def __init__(self, sep=None):
"""
:param sep:
A single character that separates date and time portions. If
``None``, the parser will accept any single character.
For strict ISO-8601 adherence, pass ``'T'``.
"""
if sep is not None:
if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'):
raise ValueError('Separator must be a single, non-numeric ' +
'ASCII character')
sep = sep.encode('ascii')
self._sep = sep
@_takes_ascii
def isoparse(self, dt_str):
"""
Parse an ISO-8601 datetime string into a :class:`datetime.datetime`.
An ISO-8601 datetime string consists of a date portion, followed
optionally by a time portion - the date and time portions are separated
by a single character separator, which is ``T`` in the official
standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be
combined with a time portion.
Supported date formats are:
Common:
- ``YYYY``
- ``YYYY-MM`` or ``YYYYMM``
- ``YYYY-MM-DD`` or ``YYYYMMDD``
Uncommon:
- ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0)
- ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day
The ISO week and day numbering follows the same logic as
:func:`datetime.date.isocalendar`.
Supported time formats are:
- ``hh``
- ``hh:mm`` or ``hhmm``
- ``hh:mm:ss`` or ``hhmmss``
- ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits)
Midnight is a special case for `hh`, as the standard supports both
00:00 and 24:00 as a representation. The decimal separator can be
either a dot or a comma.
.. caution::
Support for fractional components other than seconds is part of the
ISO-8601 standard, but is not currently implemented in this parser.
Supported time zone offset formats are:
- `Z` (UTC)
- `±HH:MM`
- `±HHMM`
- `±HH`
Offsets will be represented as :class:`dateutil.tz.tzoffset` objects,
with the exception of UTC, which will be represented as
:class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such
as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`.
:param dt_str:
A string or stream containing only an ISO-8601 datetime string
:return:
Returns a :class:`datetime.datetime` representing the string.
Unspecified components default to their lowest value.
.. warning::
As of version 2.7.0, the strictness of the parser should not be
considered a stable part of the contract. Any valid ISO-8601 string
that parses correctly with the default settings will continue to
parse correctly in future versions, but invalid strings that
currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not
guaranteed to continue failing in future versions if they encode
a valid date.
.. versionadded:: 2.7.0
"""
components, pos = self._parse_isodate(dt_str)
if len(dt_str) > pos:
if self._sep is None or dt_str[pos:pos + 1] == self._sep:
components += self._parse_isotime(dt_str[pos + 1:])
else:
raise ValueError('String contains unknown ISO components')
if len(components) > 3 and components[3] == 24:
components[3] = 0
return datetime(*components) + timedelta(days=1)
return datetime(*components)
@_takes_ascii
def parse_isodate(self, datestr):
"""
Parse the date portion of an ISO string.
:param datestr:
The string portion of an ISO string, without a separator
:return:
Returns a :class:`datetime.date` object
"""
components, pos = self._parse_isodate(datestr)
if pos < len(datestr):
raise ValueError('String contains unknown ISO ' +
'components: {}'.format(datestr))
return date(*components)
@_takes_ascii
def parse_isotime(self, timestr):
"""
Parse the time portion of an ISO string.
:param timestr:
The time portion of an ISO string, without a separator
:return:
Returns a :class:`datetime.time` object
"""
components = self._parse_isotime(timestr)
if components[0] == 24:
components[0] = 0
return time(*components)
@_takes_ascii
def parse_tzstr(self, tzstr, zero_as_utc=True):
"""
Parse a valid ISO time zone string.
See :func:`isoparser.isoparse` for details on supported formats.
:param tzstr:
A string representing an ISO time zone offset
:param zero_as_utc:
Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones
:return:
Returns :class:`dateutil.tz.tzoffset` for offsets and
:class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is
specified) offsets equivalent to UTC.
"""
return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc)
# Constants
_DATE_SEP = b'-'
_TIME_SEP = b':'
_FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)')
def _parse_isodate(self, dt_str):
try:
return self._parse_isodate_common(dt_str)
except ValueError:
return self._parse_isodate_uncommon(dt_str)
def _parse_isodate_common(self, dt_str):
len_str = len(dt_str)
components = [1, 1, 1]
if len_str < 4:
raise ValueError('ISO string too short')
# Year
components[0] = int(dt_str[0:4])
pos = 4
if pos >= len_str:
return components, pos
has_sep = dt_str[pos:pos + 1] == self._DATE_SEP
if has_sep:
pos += 1
# Month
if len_str - pos < 2:
raise ValueError('Invalid common month')
components[1] = int(dt_str[pos:pos + 2])
pos += 2
if pos >= len_str:
if has_sep:
return components, pos
else:
raise ValueError('Invalid ISO format')
if has_sep:
if dt_str[pos:pos + 1] != self._DATE_SEP:
raise ValueError('Invalid separator in ISO string')
pos += 1
# Day
if len_str - pos < 2:
raise ValueError('Invalid common day')
components[2] = int(dt_str[pos:pos + 2])
return components, pos + 2
def _parse_isodate_uncommon(self, dt_str):
if len(dt_str) < 4:
raise ValueError('ISO string too short')
# All ISO formats start with the year
year = int(dt_str[0:4])
has_sep = dt_str[4:5] == self._DATE_SEP
pos = 4 + has_sep # Skip '-' if it's there
if dt_str[pos:pos + 1] == b'W':
# YYYY-?Www-?D?
pos += 1
weekno = int(dt_str[pos:pos + 2])
pos += 2
dayno = 1
if len(dt_str) > pos:
if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep:
raise ValueError('Inconsistent use of dash separator')
pos += has_sep
dayno = int(dt_str[pos:pos + 1])
pos += 1
base_date = self._calculate_weekdate(year, weekno, dayno)
else:
# YYYYDDD or YYYY-DDD
if len(dt_str) - pos < 3:
raise ValueError('Invalid ordinal day')
ordinal_day = int(dt_str[pos:pos + 3])
pos += 3
if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)):
raise ValueError('Invalid ordinal day' +
' {} for year {}'.format(ordinal_day, year))
base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1)
components = [base_date.year, base_date.month, base_date.day]
return components, pos
def _calculate_weekdate(self, year, week, day):
"""
Calculate the day of corresponding to the ISO year-week-day calendar.
This function is effectively the inverse of
:func:`datetime.date.isocalendar`.
:param year:
The year in the ISO calendar
:param week:
The week in the ISO calendar - range is [1, 53]
:param day:
The day in the ISO calendar - range is [1 (MON), 7 (SUN)]
:return:
Returns a :class:`datetime.date`
"""
if not 0 < week < 54:
raise ValueError('Invalid week: {}'.format(week))
if not 0 < day < 8: # Range is 1-7
raise ValueError('Invalid weekday: {}'.format(day))
# Get week 1 for the specific year:
jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it
week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1)
# Now add the specific number of weeks and days to get what we want
week_offset = (week - 1) * 7 + (day - 1)
return week_1 + timedelta(days=week_offset)
def _parse_isotime(self, timestr):
len_str = len(timestr)
components = [0, 0, 0, 0, None]
pos = 0
comp = -1
if len(timestr) < 2:
raise ValueError('ISO time too short')
has_sep = len_str >= 3 and timestr[2:3] == self._TIME_SEP
while pos < len_str and comp < 5:
comp += 1
if timestr[pos:pos + 1] in b'-+Zz':
# Detect time zone boundary
components[-1] = self._parse_tzstr(timestr[pos:])
pos = len_str
break
if comp < 3:
# Hour, minute, second
components[comp] = int(timestr[pos:pos + 2])
pos += 2
if (has_sep and pos < len_str and
timestr[pos:pos + 1] == self._TIME_SEP):
pos += 1
if comp == 3:
# Fraction of a second
frac = self._FRACTION_REGEX.match(timestr[pos:])
if not frac:
continue
us_str = frac.group(1)[:6] # Truncate to microseconds
components[comp] = int(us_str) * 10**(6 - len(us_str))
pos += len(frac.group())
if pos < len_str:
raise ValueError('Unused components in ISO string')
if components[0] == 24:
# Standard supports 00:00 and 24:00 as representations of midnight
if any(component != 0 for component in components[1:4]):
raise ValueError('Hour may only be 24 at 24:00:00.000')
return components
def _parse_tzstr(self, tzstr, zero_as_utc=True):
    """
    Parse an ISO-8601 time zone designator (bytes) into a ``tzinfo``.

    :param tzstr:
        ``b'Z'``/``b'z'``, or a signed offset such as ``b'+05'``,
        ``b'+0530'`` or ``b'+05:30'``.
    :param zero_as_utc:
        When True, a zero offset is normalized to ``tz.UTC`` instead of
        an anonymous zero-offset ``tzoffset``.
    :return: a ``tzinfo`` instance.
    :raises ValueError: on malformed designators or out-of-range fields.
    """
    if tzstr == b'Z' or tzstr == b'z':
        return tz.UTC

    # Valid lengths: 3 = b'+HH', 5 = b'+HHMM', 6 = b'+HH:MM'.
    # (The 1-character form, b'Z', was handled above - hence the message.)
    if len(tzstr) not in {3, 5, 6}:
        raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters')

    if tzstr[0:1] == b'-':
        mult = -1
    elif tzstr[0:1] == b'+':
        mult = 1
    else:
        raise ValueError('Time zone offset requires sign')

    hours = int(tzstr[1:3])
    if len(tzstr) == 3:
        minutes = 0
    else:
        # Skip the separator (if present) before the minutes field.
        minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):])

    if zero_as_utc and hours == 0 and minutes == 0:
        return tz.UTC
    else:
        if minutes > 59:
            raise ValueError('Invalid minutes in time zone offset')

        if hours > 23:
            raise ValueError('Invalid hours in time zone offset')

        return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60)
# Module-level convenience singleton; `isoparse` is the canonical
# function-style entry point mirroring the class API.
DEFAULT_ISOPARSER = isoparser()
isoparse = DEFAULT_ISOPARSER.isoparse
| SickGear/SickGear | lib/dateutil/parser/isoparser.py | Python | gpl-3.0 | 13,509 |
# This script reads the carrier database
# and display it along a path in histogram form
# along with a representation of the carriers in energy space
from __future__ import print_function
from yambopy import *
import matplotlib.gridspec as gridspec
from scipy.optimize import curve_fit
import os

############
# SETTINGS #
############

folder = 'rt-24x24'
calc = 'QSSIN-D-100.0fs-2.07eV-300K-DG' # Where RT carrier output is
path = [[0.0,0.0,0.0],[0.5,0.0,0.0],[0.33333,0.33333,0.0],[0.0,0.0,0.0]]
nbv = 2 ; nbc = 2 # nb of valence and conduction bands

########
# INIT #
########

# For saving pictures (shell mkdir; assumes a POSIX environment)
os.system('mkdir -p occupations_v2/%s/%s'%(folder,calc))

# Instance containing bandstructure (as used in RT sim) and occupations
yrt = YamboRTDB(folder=folder,calc=calc)

yrt.get_path(path) # Generates kindex and distances

### aliases
times = [i * 1e15 for i in yrt.times] # carriers output times, in fs
nbands = yrt.nbands # number of bands in the RT simulation

if nbv+nbc != nbands:
    raise NameError('Incompatible number of bands, set nbv and nbc in script.')

## 'path-plot' variables
kindex = yrt.bands_indexes # kpoint indexes (in order) to draw path
distances = yrt.bands_distances
eigenvalues = yrt.eigenvalues[kindex,:] # eigenvalues of the bands included in the RT simulation
#
max_occ = np.amax(yrt.occupations[:,kindex,:]) # used to size the distribution plots
occupations = yrt.occupations[:,kindex,:]/max_occ # format time,kindex,band index (from 0 to nbands, only on path)
#
##

## 'fit' variables and function
# FD distrib for fit
def fermi_dirac(E, a, T):
    """Fermi-Dirac occupation f(E) = 1 / (1 + exp((E - a) / T)).

    ``E`` is declared first so :func:`scipy.optimize.curve_fit` treats it
    as the independent variable; ``a`` (chemical potential) and ``T``
    (temperature, in the same energy units as ``E``) are fitted.
    Works elementwise on numpy arrays as well as on scalars.
    """
    exponent = (E - a) / T
    return 1.0 / (np.exp(exponent) + 1.0)
#
KtoeV = 8.61733e-5  # Boltzmann constant in eV/K (fit yields T in eV; divide by this for Kelvin)
#
# xeng is an array of values to plot the fit properly
xeng = np.linspace(np.amin(eigenvalues[:,list(range(nbv))]), np.amax(eigenvalues[:,list(range(nbv,nbands))]),1000)
##

##############
# EXT. FIELD #
##############

# The external field is read from the o- file
ext = np.loadtxt('%s/%s/pulse/o-pulse.external_field'%(folder,calc))
field = ext[:,2]/max(abs(ext[:,2])) # polarization : x=1,y=2,z=3

##################
# ENERGY DISTRIB #
##################

# Sort the (n,k) pairs between positive and negative energies
# (If the same energy appears twice, it must not be summed over)
# NOTE(review): assumes the Fermi level sits at 0 eV - verify upstream.
list_e=[] ; list_h=[]
for k in range(yrt.nkpoints):
    for n in range(yrt.nbands):
        e = yrt.eigenvalues[k,n]
        if e<=0.0:
            list_h.append((k,n))
        else:
            list_e.append((k,n))

# Map (k,n) to a single index for e and h
# then get the list of indices to sort the energies
nrj = np.zeros((len(list_e)))
for i,(k,n) in enumerate(list_e):
    nrj[i]=yrt.eigenvalues[k,n]
sorted_e = np.argsort(nrj)

nrj = np.zeros((len(list_h)))
for i,(k,n) in enumerate(list_h):
    nrj[i]=yrt.eigenvalues[k,n]
sorted_h = np.argsort(nrj)

# Build the occupation tables occ_x[t,(nk)_index,(e|occ)]
occ_e = np.zeros((len(times),len(list_e),2))
for t in range(len(times)):
    for i,(k,n) in enumerate(list_e):
        occ_e[t,i,0]=yrt.eigenvalues[k,n]
        occ_e[t,i,1]=yrt.occupations[t,k,n]

occ_h = np.zeros((len(times),len(list_h),2))
for t in range(len(times)):
    for i,(k,n) in enumerate(list_h):
        occ_h[t,i,0]=yrt.eigenvalues[k,n]
        occ_h[t,i,1]=yrt.occupations[t,k,n]

# Sorting (by ascending energy, using the index orders computed above)
for t in range(len(times)):
    occ_e[t,:,:]=occ_e[t,sorted_e,:]
    occ_h[t,:,:]=occ_h[t,sorted_h,:]

# *(-1) on holes to fit the same way as electrons
occ_h *= -1

#################
# BAR PLOT DATA #
#################

# Fill arrays with occupation of valence and conduction bands
# Recall that 'occupations' was normalized
# one entry per band
occ_v = np.zeros((len(times),len(kindex),nbv))
occ_c = np.zeros((len(times),len(kindex),nbc))
for n in range(nbv):
    occ_v[:,:,n] = -occupations[:,:,n] # minus sign to get positive occupations
for n in range(nbc):
    occ_c[:,:,n] = occupations[:,:,n+nbv] # +nbv to read CBs

####################
# TIME LOOP & PLOT #
####################

# Gridspec allows to place subplots on a grid
# spacing for exemple can be customised
gs = gridspec.GridSpec(9, 8)

# y range for band structure & energy plots
ymin_v= np.amin(eigenvalues[:,:nbv])-0.1
ymin_c= np.amin(eigenvalues[:,nbv:])-0.1
ymax_v= np.amax(eigenvalues[:,:nbv])+0.1
ymax_c= np.amax(eigenvalues[:,nbv:])+0.1

###
# One figure per output time; saved as <time>.png under occupations_v2/
for t in range(len(times)):
#for t in (30,):
    i=t
    print(times[i])
    name = 'occupations_v2/'+folder+'/'+calc+'/%d.png' % (times[t])

    fig = plt.figure()
    fig.suptitle('Occupation of the bands and fit to the Fermi-Dirac distribution',fontsize=14,ha='center')

    ####### bandstructure w/ occupation plot
    ax1c = plt.subplot(gs[0:4,0:-2])
    ax1v = plt.subplot(gs[4:8,0:-2])

    # remove x ticks
    ax1c.tick_params(axis='x',which='both',bottom='off',top='off',labelbottom='off')
    ax1v.tick_params(axis='x',which='both',bottom='off',top='off',labelbottom='off')

    # set x range
    ax1c.set_xlim((0,distances[-1]))
    ax1v.set_xlim((0,distances[-1]))
    # y range is defined with ax3 and ax4 (they share y axis with ax1)

    # Plot band structure
    ax1v.plot(distances,eigenvalues[:,:nbv],'k-',lw=2,zorder=0)
    ax1c.plot(distances,eigenvalues[:,nbv:],'k-',lw=2,zorder=0)

    # VB (marker area scales with normalized occupation)
    for n in range(nbv):
        ax1v.scatter(distances,eigenvalues[:,n],s=400*occ_v[t,:,n],color='blue',alpha=0.5)
    # CB
    for n in range(nbc):
        ax1c.scatter(distances,eigenvalues[:,nbv+n],s=400*occ_c[t,:,n],color='red',alpha=0.5)

    # text and labels
    fig.text(0.05,0.6,'Energy (eV)',size=16,rotation='vertical')
    fig.text(0.50,0.91, '%d fs'%times[t],size=16)

    ######## field plot
    ax2 = plt.subplot(gs[-1,:])

    # remove ticks and labels
    ax2.tick_params(axis='x',which='both',bottom='off',top='off',labelbottom='off')
    ax2.tick_params(axis='y',which='both',left='off',right='off',labelleft='off')
    # text
    ax2.set_ylabel('Field')
    # frame size
    ax2.set_xlim((0,times[-1]))
    ax2.set_ylim((-1.3,1.3))
    # NOTE(review): indexes the field samples by the time value in fs;
    # assumes one sample per fs in the o-pulse file - confirm.
    ax2.plot(field[:int(times[t])])

    ## Plot of the occupation as a function of energy (rotated to match the band structure)
    ax3 = plt.subplot(gs[0:4,-2:],sharey=ax1c)
    ax4 = plt.subplot(gs[4:8,-2:],sharey=ax1v)

    # plot the data
    try: # does not break if fit is not found
        fit,cov = curve_fit(fermi_dirac,occ_e[i,:,0],occ_e[i,:,1])
    except RuntimeError:
        fit=np.array([0,0])
    ax3.fill_betweenx(occ_e[i,:,0],0,occ_e[i,:,1],color='red')
    ax3.plot(fermi_dirac(xeng,fit[0],fit[1]),xeng,'k-')
    ax3.text(0.5,0.9,'Electrons\nT = %d K'%(fit[1]/KtoeV),transform=ax3.transAxes,ha='center',va='center')

    try:
        fit,cov = curve_fit(fermi_dirac,occ_h[i,:,0],occ_h[i,:,1])
    except RuntimeError:
        fit=np.array([0,0])
    ax4.fill_betweenx(-occ_h[i,:,0],0,occ_h[i,:,1],color='blue')
    ax4.plot(fermi_dirac(xeng,fit[0],fit[1]),-xeng,'k-')
    ax4.text(0.5,0.1,'Holes\nT = %d K'%(fit[1]/KtoeV),transform=ax4.transAxes,ha='center',va='center')

    # set x and y range
    ax4.set_xlim(-0.1*max_occ,1.1*max_occ)
    ax3.set_xlim(-0.1*max_occ,1.1*max_occ)
    ax3.set_ylim(( ymin_c,ymax_c ))
    ax4.set_ylim(( ymin_v,ymax_v ))

    # hide some ticks/labels
    ax3.tick_params(axis='x',which='both',bottom='off',top='off',labelbottom='off')
    ax3.tick_params(axis='y',labelleft='off',labelright='off')
    ax4.tick_params(axis='x',which='both',bottom='off',top='off',labelbottom='off')
    ax4.tick_params(axis='y',labelleft='off',labelright='off')

    plt.savefig( name ,transparent=False,dpi=300)
    print(name)
    #plt.show()
    plt.close(fig)
| alexandremorlet/yambopy | scripts/realtime/plot_occ2.py | Python | bsd-3-clause | 7,553 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, Extension
import codecs
import os
import platform
import sys
# Prepended to README.rst to form the PyPI long description.
readme_note = """\
.. note::

   For the latest source, discussion, etc, please visit the
   `GitHub repository <https://github.com/spotify/annoy>`_\n\n

.. image:: https://img.shields.io/github/stars/spotify/annoy.svg
    :target: https://github.com/spotify/annoy
"""

with codecs.open('README.rst', encoding='utf-8') as fobj:
    long_description = readme_note + fobj.read()

# Various platform-dependent extras
extra_compile_args = ['-D_CRT_SECURE_NO_WARNINGS']
extra_link_args = []

# Not all CPUs have march as a tuning parameter
cputune = ['-march=native',]
if platform.machine() == 'ppc64le':
    extra_compile_args += ['-mcpu=native',]

if platform.machine() == 'x86_64':
    extra_compile_args += cputune

if os.name != 'nt':
    extra_compile_args += ['-O3', '-ffast-math', '-fno-associative-math']

# Add multithreaded build flag for all platforms using Python 3 and
# for non-Windows Python 2 platforms
python_major_version = sys.version_info[0]
if python_major_version == 3 or (python_major_version == 2 and os.name != 'nt'):
    extra_compile_args += ['-DANNOYLIB_MULTITHREADED_BUILD']

    if os.name != 'nt':
        extra_compile_args += ['-std=c++14']

# #349: something with OS X Mojave causes libstd not to be found
if platform.system() == 'Darwin':
    extra_compile_args += ['-mmacosx-version-min=10.12']
    extra_link_args += ['-stdlib=libc++', '-mmacosx-version-min=10.12']

# Manual configuration, you're on your own here.
manual_compiler_args = os.environ.get('ANNOY_COMPILER_ARGS', None)
if manual_compiler_args:
    extra_compile_args = manual_compiler_args.split(',')
manual_linker_args = os.environ.get('ANNOY_LINKER_ARGS', None)
if manual_linker_args:
    extra_link_args = manual_linker_args.split(',')

setup(name='annoy',
      version='1.17.0',
      description='Approximate Nearest Neighbors in C++/Python optimized for memory usage and loading/saving to disk.',
      packages=['annoy'],
      ext_modules=[
        Extension(
            'annoy.annoylib', ['src/annoymodule.cc'],
            depends=['src/annoylib.h', 'src/kissrandom.h', 'src/mman.h'],
            extra_compile_args=extra_compile_args,
            extra_link_args=extra_link_args,
        )
      ],
      long_description=long_description,
      author='Erik Bernhardsson',
      author_email='mail@erikbern.com',
      url='https://github.com/spotify/annoy',
      license='Apache License 2.0',
      classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        ],
      keywords='nns, approximate nearest neighbor search',
      setup_requires=['nose>=1.0'],
      tests_require=['numpy', 'h5py']
      )
| eddelbuettel/annoy | setup.py | Python | apache-2.0 | 3,711 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import whatstyle

if __name__ == "__main__":
    # Run every doctest embedded in the whatstyle module; doctest prints
    # failures and stays silent on success.
    import doctest
    doctest.testmod(whatstyle)
| mikr/whatstyle | tests/test_doc.py | Python | mit | 142 |
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2015 SciFabric LTD.
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.jobs import send_mail
from mock import patch
@patch('pybossa.jobs.mail')
@patch('pybossa.jobs.Message')
class TestSendMailJob(object):
    # Class-level patches apply to every test method. Mocks are injected
    # innermost decorator first, so Message precedes mail in the argument
    # list of each test.

    def test_send_mail_craetes_message(self, Message, mail):
        # (sic: "craetes") send_mail must build the Message from the dict.
        mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
                         body='Hello Pepito!')
        send_mail(mail_dict)
        Message.assert_called_once_with(**mail_dict)

    def test_send_mail_sends_mail(self, Message, mail):
        # send_mail must hand the constructed message to mail.send exactly once.
        mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
                         body='Hello Pepito!')
        send_mail(mail_dict)
        mail.send.assert_called_once_with(Message())
| geotagx/pybossa | test/test_jobs/test_send_mail.py | Python | agpl-3.0 | 1,418 |
# coding=utf-8
from system.translations import Translations
__author__ = 'Sean'
_ = Translations().get()
class Channel(object):
    """
    A channel - Represents a channel on a protocol. Subclass this!

    :ivar name: The name of the channel
    :ivar protocol: The protocol object this channel belongs to (may be None)
    :ivar users: A set containing all the User objects in the channel
    """

    def __init__(self, name, protocol=None):
        """
        Initialise the channel. Remember to call super in subclasses!

        :arg name: The name of the channel
        :type name: str

        :arg protocol: The protocol object this channel belongs to
        :type protocol: Protocol
        """
        self.name = name  # This is essential!
        self.protocol = protocol  # May be None for one-off or fake channels
        self.users = set()  # This is also essential!

    def respond(self, message):
        """Send *message* to this channel. Must be overridden."""
        raise NotImplementedError(_("This method must be overridden"))

    def add_user(self, user):
        """Register *user* as present in this channel."""
        self.users.add(user)

    def remove_user(self, user):
        """Remove *user* from the channel; log (debug) if they weren't in it."""
        try:
            self.users.remove(user)
        except KeyError:
            self.protocol.log.debug(
                "Tried to remove non-existent user \"%s\" from channel \"%s\""
                % (user, self)
            )

    def __json__(self):  # TODO
        """
        Return a representation of your object that can be json-encoded

        For example, a dict, or a JSON string that represents the data in
        the object
        """
        # Consistency fix: route the message through the translation helper,
        # matching respond() above (it was previously untranslated here).
        raise NotImplementedError(_("This method must be overridden"))
| UltrosBot/Ultros | system/protocols/generic/channel.py | Python | artistic-2.0 | 1,538 |
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'a!w6-=30*n7xgr#w1dii5l!kg#64tlrm^ir4r=ne=&^vwz@m97'

# SECURITY WARNING: don't run with debug turned on in production!
# Consolidated: this module used to set DEBUG = True here and override it
# with DEBUG = False near the bottom, so the effective value was always
# False. It is now assigned exactly once; local_settings.py (imported
# below) may still re-enable it for development.
DEBUG = False

# Consolidated: previously assigned [] here and ['*'] further down; the
# effective value was ['*'] (accept any host, as needed on Heroku-style
# hosting where the app domain is not known in advance).
ALLOWED_HOSTS = ['*']

LOGIN_REDIRECT_URL = '/'

# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'blog',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)

ROOT_URLCONF = 'mysite.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'mysite.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'Europe/Berlin'

USE_I18N = True

USE_L10N = True

USE_TZ = False


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/

STATIC_URL = '/static/'

# Production/Heroku overrides: replace the SQLite default above with
# whatever DATABASE_URL describes. NOTE: dj_database_url.config() returns
# {} when DATABASE_URL is unset (preserved from the original behaviour).
import dj_database_url
DATABASES['default'] = dj_database_url.config()

# Trust the X-Forwarded-Proto header set by the hosting proxy for HTTPS
# detection.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

STATIC_ROOT = 'staticfiles'

# Optional machine-local overrides (e.g. DEBUG = True during development).
try:
    from .local_settings import *
except ImportError:
    pass

# Assigned after the local_settings import, so local_settings cannot
# override it (ordering preserved from the original file).
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "static"),
)
| maciek263/django2 | mysite/settings.py | Python | mit | 3,005 |
# -*- coding: UTF-8 -*-
# Copyright (C) 2007 Sylvain Taverne <sylvain@itaapy.com>
# Copyright (C) 2007-2008 Henry Obein <henry@itaapy.com>
# Copyright (C) 2007-2008 Juan David Ibáñez Palomar <jdavid@itaapy.com>
# Copyright (C) 2007-2008, 2010 Hervé Cauwelier <herve@itaapy.com>
# Copyright (C) 2008 Gautier Hayoun <gautier.hayoun@itaapy.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from the Standard Library
from re import compile
from urllib import urlencode
# Import from docutils
from docutils import nodes
from docutils.core import publish_doctree
from docutils.languages.en import labels
from docutils.readers import get_reader_class
from docutils.parsers.rst import directives, Directive
from docutils.parsers.rst.directives import register_directive
from docutils.utils import SystemMessage
# Import from itools
from itools.gettext import MSG
from itools.handlers import checkid
from itools.uri import get_reference
from itools.web import get_context
# Import from ikaaro
from ikaaro.text import Text
from ikaaro.resource_ import DBResource
from page_views import WikiPage_View, WikiPage_Edit, WikiPage_Help
from page_views import WikiPage_ToPDF, WikiPage_ToODT, WikiPage_HelpODT
from page_views import is_external, BacklinksMenu
StandaloneReader = get_reader_class('standalone')
def language(argument):
    """Directive option validator: byte-encode a language-COUNTRY code.

    :raises ValueError: if the value cannot be encoded.
    """
    try:
        encoded = argument.encode()
    except UnicodeEncodeError:
        raise ValueError('expected "xx-YY" language-COUNTRY code')
    return encoded
def yesno(argument):
    # Directive option validator: restrict the value to "yes" or "no"
    # (raises ValueError for anything else, per directives.choice).
    return directives.choice(argument, ('yes', 'no'))
# Class name gives the DOM element name
class book(nodes.Admonition, nodes.Element):
    # Custom doctree node emitted by the "book" directive below; all
    # behaviour comes from the docutils base classes.
    pass
class Book(Directive):
    """reStructuredText ``book`` directive: wraps a bulleted page list (and
    an optional cover link) into a `book` node preceded by a literal
    metadata block used by the ODT/PDF export views."""

    required_arguments = 0
    optional_arguments = 1
    final_argument_whitespace = True
    option_spec = {
        'cover': directives.uri,
        'template': directives.unchanged,
        'ignore-missing-pages': yesno,
        'toc-depth': directives.positive_int,
        'title': directives.unchanged,
        'comments': directives.unchanged,
        'subject': directives.unchanged,
        'language': language,
        'keywords': directives.unchanged,
        'filename': directives.unchanged}
    has_content = True

    def run(self):
        """Build and return ``[meta_node, book_node]`` for the doctree."""
        self.assert_has_content()
        # Default values
        options = self.options
        for option in ('title', 'comments', 'subject', 'keywords'):
            if options.get(option) is None:
                options[option] = u""
        if options.get('language') is None:
            # The website language, not the content language
            # because the wiki is not multilingual anyway
            context = get_context()
            languages = context.site_root.get_property('website_languages')
            language = context.accept_language.select_language(languages)
            options['language'] = language
        # Cover page
        if self.arguments:
            # Push cover as an option
            # NOTE(review): [1:-2] looks like it strips link markup
            # (e.g. backquote/underscore) around the argument - confirm.
            cover_uri = checkid(self.arguments[0][1:-2])
            options['cover'] = directives.uri(cover_uri)
        # Metadata
        metadata = ['Book:']
        for key in ('toc-depth', 'ignore-missing-pages', 'title', 'comments',
                    'subject', 'keywords', 'language', 'filename'):
            value = options.get(key)
            if not value:
                continue
            metadata.append(' %s: %s' % (key, value))
        template = options.get('template')
        if template is not None:
            metadata.append(' template: ')
            meta_node = nodes.literal_block('Book Metadata',
                                            '\n'.join(metadata))
            # Append the template as a clickable reference, flagged so the
            # export views can recognise it.
            meta_node.append(nodes.reference(refuri=template, text=template,
                                             name=template,
                                             wiki_template=True))
        else:
            meta_node = nodes.literal_block('Book Metadata',
                                            '\n'.join(metadata))
        book_node = book(self.block_text, **options)
        if self.arguments:
            # Display the cover
            cover_text = self.arguments.pop(0)
            textnodes, messages = self.state.inline_text(cover_text,
                                                         self.lineno)
            book_node += nodes.title(cover_text, '', *textnodes)
            book_node += messages
        # Parse inner list
        self.state.nested_parse(self.content, self.content_offset, book_node)
        # Automatically number pages
        for bullet_list in book_node.traverse(condition=nodes.bullet_list):
            bullet_list.__class__ = nodes.enumerated_list
            bullet_list.tagname = 'enumerated_list'
        return [meta_node, book_node]
class WikiPage(Text):
    """A wiki page: reStructuredText content with wiki-style link
    resolution, backlink tracking, and ODT/PDF export."""

    class_id = 'WikiPage'
    class_version = '20090123'
    class_title = MSG(u"Wiki Page")
    class_description = MSG(u"Wiki contents")
    class_icon16 = '/ui/wiki/WikiPage16.png'
    class_icon48 = '/ui/wiki/WikiPage48.png'
    class_views = ['view', 'edit', 'externaledit', 'backlinks', 'commit_log',
                   'help', 'to_odt', 'help_odt']

    # docutils settings overrides applied on every parse
    overrides = {
        # Security
        'file_insertion_enabled': 0,
        'raw_enabled': 0,
        # Encodings
        'input_encoding': 'utf-8',
        'output_encoding': 'utf-8',
    }

    # Views
    new_instance = DBResource.new_instance
    view = WikiPage_View
    to_pdf = WikiPage_ToPDF
    edit = WikiPage_Edit
    to_odt = WikiPage_ToODT
    help = WikiPage_Help
    help_odt = WikiPage_HelpODT

    def get_text(self):
        """Return the raw reST source as a byte string ('' if no handler)."""
        handler = self.get_value('data')
        return handler.to_str() if handler else ''

    #######################################################################
    # Ikaaro API
    #######################################################################
    def get_context_menus(self):
        """Prepend the backlinks menu to the parent's context menus."""
        return [BacklinksMenu()] + self.parent.get_context_menus()

    def get_links(self):
        """Return the set of absolute paths (as strings) this page links
        to: wiki links (including dangling ones), internal references and
        internal images."""
        base = self.abspath
        try:
            doctree = self.get_doctree()
        except SystemMessage:
            # The doctree is in a incoherent state
            return set()

        # Links
        links = set()
        for node in doctree.traverse(condition=nodes.reference):
            refname = node.get('wiki_name')
            if refname is False:
                # Wiki link not found: record the path it would have
                title = node['name']
                path = checkid(title) or title
                path = base.resolve(path)
            elif refname:
                # Wiki link found, "refname" is the path
                path = base.resolve2(refname)
            else:
                # Regular link, include internal ones
                refuri = node.get('refuri')
                if refuri is None:
                    continue
                reference = get_reference(refuri.encode('utf_8'))
                # Skip external
                if is_external(reference):
                    continue
                path = base.resolve2(reference.path)
            path = str(path)
            links.add(path)

        # Images
        for node in doctree.traverse(condition=nodes.image):
            reference = get_reference(node['uri'].encode('utf_8'))
            # Skip external image
            if is_external(reference):
                continue
            # Resolve the path
            path = base.resolve(reference.path)
            path = str(path)
            links.add(path)

        return links

    def update_links(self, source, target,
                     links_re = compile(r'(\.\. .*?: )(\S*)')):
        """Rewrite reST target directives pointing at *source* so they
        point at *target* (called when a linked resource moves).

        The regex is precompiled once as a default argument (deliberate
        caching idiom; it is never mutated). split() alternates plain
        text / directive prefix / URI, hence the mod-3 counter below.
        """
        old_data = self.get_text()
        new_data = []
        not_uri = 0
        base = self.parent.abspath
        for segment in links_re.split(old_data):
            not_uri = (not_uri + 1) % 3
            if not not_uri:
                reference = get_reference(segment)
                # Skip external link
                if is_external(reference):
                    new_data.append(segment)
                    continue
                # Strip the view
                path = reference.path
                if path and path[-1] == ';download':
                    path = path[:-1]
                    view = '/;download'
                else:
                    view = ''
                # Resolve the path
                path = base.resolve(path)
                # Match ?
                if path == source:
                    segment = str(base.get_pathto(target)) + view
            new_data.append(segment)
        new_data = ''.join(new_data)
        self.get_value('data').load_state_from_string(new_data)
        get_context().database.change_resource(self)

    #######################################################################
    # API
    #######################################################################
    def resolve_link(self, title):
        """Resolve a wiki link title to a sibling resource, or None."""
        parent = self.parent

        # Try regular resource name or path
        try:
            name = str(title)
        except UnicodeEncodeError:
            pass
        else:
            resource = parent.get_resource(name, soft=True)
            if resource is not None:
                return resource

        # Convert wiki name to resource name
        name = checkid(title)
        if name is None:
            return None
        return parent.get_resource(name, soft=True)

    def set_new_resource_link(self, node):
        """Turn a dangling wiki reference into a ";new_resource" URL so
        clicking it creates the missing page (or ODT template)."""
        node['classes'].append('nowiki')
        prefix = self.get_pathto(self.parent)
        title = node['name']
        title_encoded = title.encode('utf_8')
        if node.attributes.get('wiki_template'):
            new_type = 'application/vnd.oasis.opendocument.text'
        else:
            new_type = self.__class__.__name__
        params = {'type': new_type,
                  'title': title_encoded,
                  'name': checkid(title) or title_encoded}
        refuri = "%s/;new_resource?%s" % (prefix,
                                          urlencode(params))
        node['refuri'] = refuri

    def get_doctree(self):
        """Parse the page's reST source and return the doctree, with wiki
        links resolved and internal link/image paths rewritten to
        absolute resource paths."""
        parent = self.parent

        # Override dandling links handling
        class WikiReader(StandaloneReader):
            supported = ('wiki',)

            # docutils resolver hook: closes over `self` (the page) to
            # tag each unresolved reference with its wiki target path,
            # or False when the target does not exist.
            def wiki_reference_resolver(target):
                title = target['name']
                resource = self.resolve_link(title)
                if resource is None:
                    # Not Found
                    target['wiki_name'] = False
                else:
                    # Found
                    target['wiki_name'] = str(resource.abspath)
                return True

            wiki_reference_resolver.priority = 851
            unknown_reference_resolvers = [wiki_reference_resolver]

        # Publish!
        reader = WikiReader(parser_name='restructuredtext')
        doctree = publish_doctree(self.get_text(), reader=reader,
                                  settings_overrides=self.overrides)

        # Assume internal paths are relative to the container
        for node in doctree.traverse(condition=nodes.reference):
            refuri = node.get('refuri')
            # Skip wiki or fragment link
            if node.get('wiki_name') or not refuri:
                continue
            reference = get_reference(refuri.encode('utf_8'))
            # Skip external
            if is_external(reference):
                continue
            # Resolve absolute path (try from this page, then the parent)
            resource = self.get_resource(reference.path, soft=True)
            if resource is None:
                resource = parent.get_resource(reference.path, soft=True)
            if resource is None:
                continue
            refuri = str(resource.abspath)
            # Restore fragment
            if reference.fragment:
                refuri = "%s#%s" % (refuri, reference.fragment)
            node['refuri'] = refuri

        # Assume image paths are relative to the container
        for node in doctree.traverse(condition=nodes.image):
            reference = get_reference(node['uri'].encode('utf_8'))
            # Skip external
            if is_external(reference):
                continue
            # Strip the view
            path = reference.path
            if path[-1][0] == ';':
                path = path[:-1]
            # Resolve absolute path
            resource = parent.get_resource(path, soft=True)
            if resource is not None:
                node['uri'] = str(resource.abspath)

        return doctree

    def get_book(self):
        """Return the first `book` node of the doctree, or None."""
        doctree = self.get_doctree()
        return doctree.next_node(condition=nodes.book)
# Register dummy book directive for ODT export: expose the custom node on
# the docutils `nodes` module, hook the directive into the reST parser and
# give it an (empty) label for the English writer.
nodes._add_node_class_names(['book'])
nodes.book = book
register_directive('book', Book)
labels['book'] = ''
| hforge/wiki | page.py | Python | gpl-3.0 | 13,317 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simple wrapper around sr_lobo.py to see output in graphic mode.
"""
import os
import sys
import subprocess
from threading import Thread
from Queue import Queue, Empty
from collections import deque
from itertools import islice
from Tkinter import Tk, BOTH, RIGHT, LEFT, X, END, NORMAL, DISABLED
from ttk import Frame, Button, Style, Label
import ScrolledText
import tkFileDialog
# TODO: If you learn to pack the widgets properly to make better use of the
# window space, bind "/" and "n" for searching and such, fix the mess with
# the progress bars, make the interface run more smoothly and fix the bug
# that loads the wrong report... you'll nail it.
# Funciones para hilos
def iter_except(function, exception):
    """Works like builtin 2-argument `iter()`, but stops on `exception`
    being raised by ``function`` instead of comparing against a sentinel.
    """
    while True:
        try:
            yield function()
        except exception:
            return
# Funciones auxiliares
def search_reportfile(ruta="."):
    """
    Find the most recently created sr_lobo log (default name pattern,
    ``*sr_lobo.txt``) inside ``ruta`` and return its path.

    Prints a message and exits the program when no such file exists.
    """
    candidatos = [os.path.join(ruta, nombre) for nombre in os.listdir(ruta)
                  if nombre.endswith('sr_lobo.txt')]
    if not candidatos:
        # Same message/exit behaviour as before; parenthesised print is
        # valid as both a Py2 statement and a Py3 call.
        print("Fichero *sr_lobo.txt no encontrado en `{}`.".format(ruta))
        sys.exit(1)
    return max(candidatos, key=os.path.getctime)
# pylint: disable=too-few-public-methods
class RedirectText(object):
    """
    File-like object that redirects sys.stdout writes into a (scrolled)
    Tkinter text widget.
    """
    def __init__(self, text_ctrl):
        """Store the text widget used as the output sink."""
        self.output = text_ctrl

    def write(self, cadena):
        """
        Append the received text to the widget: temporarily enable it,
        insert at the end, scroll into view, disable again and let
        Tkinter process pending redraws.
        """
        self.output.config(state=NORMAL)
        self.output.insert(END, cadena)
        self.output.see(END)
        self.output.config(state=DISABLED)
        self.output.update_idletasks()

    # pylint: disable=no-self-use
    def fileno(self):
        """File descriptor: pretend to be the real standard output so
        code that needs a descriptor (e.g. subprocess) still works."""
        return sys.stdout.fileno()
# pylint: disable=too-many-ancestors,too-many-instance-attributes
class SrLoboViewer(Frame):
"""
Class around all GUI stuff.
"""
def __init__(self, parent, txtfile=None):
""" Constructor. """
self.textfile = txtfile
Frame.__init__(self, parent) #, background="white")
self.parent = parent
self.init_ui()
self.center_window()
self._process = None
self._cached_stamp = 0
def init_ui(self):
""" Crea la ventana. """
self.parent.title("Sr. Lobo - Soluciono problemas")
dirname = os.path.abspath(os.path.dirname(__file__))
iconpath = os.path.join(dirname, "mr_wolf.ico")
self.parent.iconbitmap(iconpath)
self.style = Style()
self.style.theme_use("default")
self.pack(fill=BOTH, expand=True)
# ## Frame de la salida estándar (consola)
frameconsole = Frame(self)
frameconsole.pack(fill=X)
labelstdout = Label(frameconsole, text="Salida estándar:", width=15)
labelstdout.pack(side=LEFT, padx=5, pady=5)
self.consolepad = ScrolledText.ScrolledText(frameconsole,
background="black",
foreground="orange",
font="monospace",
height=5) # líneas
self.consolepad.pack(fill=X)
# ## Frame de la salida del informe (report)
framereport = Frame(self)
framereport.pack(fill=X)
labelreport = Label(framereport, text="Informe:", width=15)
labelreport.pack(side=LEFT, padx=5, pady=5)
self.reportpad = ScrolledText.ScrolledText(framereport)
self.reportpad.pack(fill=X)
# ## Botones de guardar, recargar y salir.
save_button = Button(self, text="Guardar", command=self.saveas)
save_button.pack(side=LEFT, padx=5, pady=5)
reload_button = Button(self, text="Recargar", command=self.reload)
reload_button.pack(side=LEFT, padx=5, pady=5, expand=True)
quit_button = Button(self, text="Salir", command=self.quit)
quit_button.pack(side=RIGHT, padx=5, pady=5)
self.update_idletasks()
self.redir = RedirectText(self.consolepad)
def quit(self, *args, **kw):
""" Mata los hilos pendientes y cierra la ventana. """
if self._process:
subprocess.Popen.kill(self._process)
return Frame.quit(self, *args, **kw)
def center_window(self):
""" Centra la ventana en la pantalla. """
width = 800
height = 600
screenw = self.parent.winfo_screenwidth()
screenh = self.parent.winfo_screenheight()
xpos = (screenw - width)/2
ypos = (screenh - height)/2
self.parent.geometry('%dx%d+%d+%d' % (width, height, xpos, ypos))
def saveas(self):
""" Guarda el contenido del widget de texto en un nuevo fichero. """
filename = tkFileDialog.asksaveasfile(mode='w')
if filename:
# slice off the last character from get, as an extra return is added
data = self.reportpad.get('1.0', END+'-1c')
data = data.encode("utf-8")
filename.write(data)
filename.close()
def run_sr_lobo(self, path_report=None):
    """
    Run the real script and load the contents of its output file into
    the widget.  The output file is created at the received
    ``path_report`` path, which is what sr_lobo is invoked with.  If
    nothing is received, the default generated by sr_lobo.py is used.
    """
    # -u keeps the child's stdout unbuffered so output streams live.
    comando = ["python", "-u", "sr_lobo.py"]
    if path_report:
        self.textfile = path_report
        comando.append(path_report)
    else:
        self.textfile = None
    # ##
    self._process = subprocess.Popen(comando,
                                     shell=False,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT,
                                     bufsize=0)
    queue = Queue()
    # Background thread pumps the child's stdout into the queue...
    thread = Thread(target=self.reader_thread, args=[queue])
    thread.start()
    # ...while the Tk event loop drains it into the console pad.
    self.update_queue(queue)
    # Look for the file that Sr. Lobo must have generated.
    if not self.textfile:
        self.textfile = search_reportfile()
    # Refresh the report pad from the file in a separate thread.
    th_output = Thread(target=self.reload, args=[False])
    th_output.start()
def update_queue(self, queue):
    """Update GUI with items from the queue."""
    # read no more than 10000 lines, use deque to discard lines except the last one,
    for line in deque(islice(iter_except(queue.get_nowait, Empty), 10000), maxlen=1):
        if line is None:
            return  # stop updating
        else:
            self.redir.write(line)  # update GUI
    # Re-schedule ourselves on the Tk event loop every 40 ms.
    self.parent.after(40, self.update_queue, queue)  # schedule next update
def reader_thread(self, queue):
    """Pump every line of the child process' stdout into *queue*."""
    sys.stdout.flush()
    while True:
        raw_line = self._process.stdout.readline()
        if raw_line == b'':
            # EOF: the subprocess closed its stdout.
            break
        sys.stdout.flush()
        queue.put(raw_line)
def reload(self, force_reload=True):
    """
    Reload the file's contents into the widget if it has changed or if
    the reload button was pressed (force_reload=True).
    """
    if not self.textfile and force_reload:
        self.textfile = search_reportfile()
    if self.textfile:
        # The full os.stat result changes whenever the file is rewritten.
        stamp = os.stat(self.textfile)
        if force_reload or stamp != self._cached_stamp:
            filein = open(self.textfile, "r")
            content = filein.read()
            filein.close()
            # The pad is normally disabled; enable it only while rewriting.
            self.reportpad.config(state=NORMAL)
            self.reportpad.delete('1.0', END)
            self.reportpad.insert("1.0", content)
            self.reportpad.see(END)
            self.reportpad.config(state=DISABLED)
            self._cached_stamp = stamp
def main():
    """ Main function: build the viewer, launch sr_lobo.py, run Tk loop. """
    root = Tk()
    app = SrLoboViewer(root)
    # Start the child script immediately; its output streams into the GUI.
    app.run_sr_lobo()
    root.mainloop()

if __name__ == "__main__":
    main()
| pacoqueen/ginn | ginn/api/tests/tk_sr_lobo.py | Python | gpl-2.0 | 8,568 |
#!/usr/bin/env python
# File: plot_histograms3.py
# Created on: Mon Aug 20 22:14:33 2012
# Last Change: Tue Jan 15 16:03:45 2013
# Purpose of script: <+INSERT+>
# Author: Steven Boada
import pylab as pyl
from mk_galaxy_struc import mk_galaxy_struc
# Split galaxies by I-H ICD strength (S/N >= 30 only; the rest count as
# "very low") and bin each group in redshift; one pie per redshift bin
# shows the mix of ICD classes.
galaxies = mk_galaxy_struc()

verylow = []
low = []
med = []
high = []

for galaxy in galaxies:
    if galaxy.ston_I >= 30.:
        # Boundary values (0.11, 0.18) intentionally fall into two bins,
        # matching the original selection.
        if 0.04 <= galaxy.ICD_IH <= 0.11:
            low.append(galaxy.z)
        if 0.11 <= galaxy.ICD_IH <= 0.18:
            med.append(galaxy.z)
        if 0.18 <= galaxy.ICD_IH <= 0.25:
            high.append(galaxy.z)
    else:
        # Low signal-to-noise galaxies are counted separately.
        verylow.append(galaxy.z)


def _redshift_counts(redshifts):
    """Count how many redshifts fall in each of the four open z intervals.

    NOTE: the original script tested ``2.0 < z < 2.5`` twice (copy/paste
    bug) and never counted the 2.5-3.0 bin, despite labelling it; the
    third interval is now 2.5-3.0 as the labels intend.
    """
    edges = ((1.5, 2.0), (2.0, 2.5), (2.5, 3.0), (3.0, 3.5))
    return [sum(1 for z in redshifts if lo < z < hi) for lo, hi in edges]


# counts[group][zbin] for the four ICD groups, in pie-slice order.
counts = [_redshift_counts(group) for group in (verylow, low, med, high)]

f1, f1s = pyl.subplots(4, 1, figsize=(3, 9))

labels = '1.5 < z < 2.0', '2.0 < z < 2.5', '2.5 < z < 3.0', '3.0 < z < 3.5'

for zbin, axis in enumerate(f1s):
    bin_counts = [counts[group][zbin] for group in range(4)]
    total = float(sum(bin_counts))
    fractions = [count / total for count in bin_counts]
    axis.pie(fractions, autopct='%1.f%%', pctdistance=1.2, shadow=True)

# Tweak the plot.
#pyl.legend(loc='center right')
#pyl.subplots_adjust(left=0.15, bottom=0.15)
#pyl.xlabel("Redshift")
#pyl.ylabel(r"$N/N_{bin}$")
#pyl.savefig('icd_vs_z_hist_IH.eps',bbox='tight')
pyl.show()
| boada/ICD | sandbox/legacy_plot_code/plot_pie_IH.py | Python | mit | 2,930 |
#!/usr/bin/python
# Copyright (c) 2016 Intel Corporation.
# SPDX-License-Identifier: MIT
import unittest as u
import re, fnmatch, os, sys
# Paths (relative to this test file) used by the sample-name check below.
sampleMappingFile = '../examples/samples.mapping.txt'
cSamplesDir = '../examples/'
javaSamplesDir = '../examples/java/'
cppSamplesDir = '../examples/c++/'
class SampleNames(u.TestCase):
    """Verifies that every sample listed in the mapping file exists on disk."""

    JAVA_PREFIX = 'java/'

    def test_existing_samples(self):
        """Every C, C++ and Java sample named in the mapping must exist."""
        missing_c_files = []
        missing_cpp_files = []
        missing_java_files = []
        # List each sample directory once; set membership is O(1) per name
        # (the original rescanned the directory for every mapping line).
        c_files = set(os.listdir(cSamplesDir))
        cpp_files = set(os.listdir(cppSamplesDir))
        java_files = set(os.listdir(javaSamplesDir))
        with open(sampleMappingFile, "r") as f:
            for line in f:
                sampleNames = line.split()
                if len(sampleNames) < 2:
                    continue  # skip blank or malformed mapping lines
                cSampleName = sampleNames[0]
                javaSampleName = sampleNames[1]
                # check for C files
                if cSampleName.endswith('.c') and cSampleName not in c_files:
                    missing_c_files.append(cSampleName)
                # check for Cpp files
                if cSampleName.endswith('.cpp') and cSampleName not in cpp_files:
                    missing_cpp_files.append(cSampleName)
                # Remove the "java/" directory prefix.  The original code
                # used lstrip("java/"), which strips a *character set*
                # (any of 'j', 'a', 'v', '/'), and could therefore also eat
                # the first letters of file names such as "avg.java".
                if javaSampleName.startswith(self.JAVA_PREFIX):
                    javaSampleName = javaSampleName[len(self.JAVA_PREFIX):]
                if javaSampleName.endswith('.java') and javaSampleName not in java_files:
                    missing_java_files.append(javaSampleName)
        self.assertEqual( len(missing_java_files) + len(missing_c_files) + len(missing_cpp_files), 0,
                          "\nThe following files are missing from samples:\n" + \
                          "\n".join(missing_c_files) + "\n" + \
                          "\n".join(missing_cpp_files) + "\n" + \
                          "\n".join(missing_java_files))
if __name__ == '__main__':
    # Run the unittest suite when executed directly.
    u.main()
| Propanu/mraa | tests/check_samplenames.py | Python | mit | 2,291 |
#! python
# A small program to match either a fasta or qual file based on whether the barcode was found or not.
# Need a group file that designates sequences without a recognized barcode as "none".
# To use the program entries should look like the following:
# python matchFastaGroup.py <fastaORqualFile> <groupFilew> <outputfileName.fasta> <thingToAdd>
# Need to add a component that incorporates new mapfile into the workflow
# Load the needed modules for the program
import sys, re
# Read in a Command arguments for files to match
# Input other instructions from here
def commandLine():
    """Return (fastafile, groupfile, outputfile, addition) from sys.argv."""
    args = sys.argv
    return args[1], args[2], args[3], args[4]
# Read in data and create dictionary
def makeDataArray(inputfile, type):
    """Parse *inputfile* into a dict keyed by the given *type*.

    type == "fasta": {sequence name: sequence}; assumes strictly
    alternating header/sequence lines (headers on odd lines).
    type == "group": {sequence name: group} from tab-separated lines.
    otherwise (map file): {number: sequence name}.

    NOTE: *type* shadows the builtin but is kept for caller compatibility.
    The original code closed the file only in the "fasta" branch; the
    ``with`` block now guarantees the handle is closed in every branch.
    """
    DataDict = {}
    with open(inputfile, 'r') as handle:
        if type == "fasta":
            print("Reading in Fasta file.....")
            name = None
            for lineno, line in enumerate(handle, start=1):
                if lineno % 2 == 1:
                    # Header line: drop every '>' and trailing tab/newline.
                    name = re.sub('>', '', line).strip('\t\n')
                else:
                    DataDict[name] = line.strip('\n')
        elif type == "group":
            print("Reading in group file......")
            for line in handle:
                seqName, group = line.split('\t')
                DataDict[seqName] = group.strip('\n')
        else:
            for line in handle:
                number, seqname = line.split('\t')
                DataDict[number] = seqname.strip('\n')
    return DataDict
def makeNewFasta(fastaDict, addition, outputfile):
    """Write fastaDict to *outputfile*, appending _*addition* to each name."""
    with open(outputfile, 'w') as outfile:
        print("Creating new fasta file......")
        for seq_name, sequence in fastaDict.items():
            outfile.write(">{0}_{1}\n{2}\n".format(seq_name, addition, sequence))
# Make a new group file based on the groups not labelled with "none"
def makeNewGroups(groupDict, addition, outputfile):
    """Write groupDict next to *outputfile*, swapping the .fasta suffix
    for .groups.

    The original used ``re.sub('.fasta', ...)`` where the unescaped '.'
    matched any character and the pattern was unanchored, so names like
    "myfasta.fasta" became "m.groups.fasta"; the pattern is now escaped
    and anchored to the end of the name.
    """
    new_outputfile = re.sub(r'\.fasta$', '.groups', outputfile)
    with open(new_outputfile, 'w') as outfile:
        print("Creating new group file.......")
        for seq_name, group in groupDict.items():
            outfile.write("{0}_{1}\t{2}\n".format(seq_name, addition, group))
# Run the actual program
def main():
    """Drive the rename: parse argv, read inputs, write renamed outputs."""
    fastafile, groupfile, outputfile, addition = commandLine()
    fastaDict = makeDataArray(fastafile, "fasta")
    groupDict = makeDataArray(groupfile, "group")
    makeNewFasta(fastaDict, addition, outputfile)
    makeNewGroups(groupDict, addition, outputfile)
    print("Complete")
if __name__ == '__main__': main() | marcsze/pythonPrograms | addToSeqName.py | Python | mit | 2,790 |
# Non-dimensional mesh size
mesh_width = 1.0
mesh_height = 0.4 * mesh_width  # height fixed at 40% of the width

# Number of cells in each dimension
nx = 8
ny = nx  # same cell count on every axis
nz = nx
| mroyluna/summer17mantlesim | mantle_simulation/constants.py | Python | mit | 137 |
from rest_framework.test import APITestCase
from rest_framework import status
from .factories import UserFactory
from django.contrib.auth.models import User
class AuthTestCase(APITestCase):
    """Exercises login/logout behaviour of the /auth/ endpoint."""

    url = '/auth/'

    def setUp(self):
        self.user = UserFactory()
        # The factory leaves the raw password on the model; capture it for
        # the login payload before hashing it with set_password.
        raw_password = self.user.password
        self.data = {
            'username': self.user.username,
            'password': raw_password,
        }
        self.expected = {
            'username': self.user.username,
            'email': self.user.email,
        }
        self.user.set_password(raw_password)
        self.user.save()

    def test_login(self):
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, self.expected)
        stored = User.objects.get(username=self.user.username)
        self.assertEqual(stored.username, self.user.username)

    def test_logout(self):
        self.client.logout()
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_wrong_password(self):
        # Both a wrong password and an empty one must be rejected.
        for bad_password in ('sW3%j34G3', ''):
            self.data['password'] = bad_password
            response = self.client.post(self.url, self.data)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_wrong_username(self):
        # Logging in with the email (or nothing) as username must fail.
        for bad_username in (self.user.email, ''):
            self.data['username'] = bad_username
            response = self.client.post(self.url, self.data)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class RegTestCase(APITestCase):
    """Exercises account creation via /auth/register/."""

    url = '/auth/register/'

    def setUp(self):
        # build() keeps the user out of the DB so registration can create it.
        self.user = UserFactory.build()
        self.data = {
            'username': self.user.username,
            'email': self.user.email,
            'password': self.user.password,
        }
        self.expected = {
            'username': self.user.username,
            'email': self.user.email,
        }

    def _assert_rejected(self, field, value):
        # Helper: posting with *field* set to *value* must yield HTTP 400.
        self.data[field] = value
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_user(self):
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data, self.expected)
        stored = User.objects.get(username=self.user.username)
        self.assertEqual(stored.username, self.user.username)

    def test_not_valid_password(self):
        # Too short, digits-only, dictionary word, and empty all fail.
        for bad in ('1$aF', '1213123', 'password', ''):
            self._assert_rejected('password', bad)

    def test_not_valid_email(self):
        # A non-email string and an empty value must both be rejected.
        for bad in (self.user.username, ''):
            self._assert_rejected('email', bad)

    def test_not_valid_username(self):
        # Illegal characters and an empty value must both be rejected.
        for bad in ('%$%Fjfk', ''):
            self._assert_rejected('username', bad)
| chepe4pi/sokoban_api | sk_auth/tests/tests.py | Python | gpl-2.0 | 4,236 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from datetime import datetime as dt
from bs4 import BeautifulSoup
from unittest import TestCase, main
from pitchpx.game.boxscore import BoxScore
from pitchpx.game.game import Game
from pitchpx.game.players import Players
__author__ = 'Shinichi Nakagawa'
class TestBoxScore(TestCase):
XML_BOXSCORE = """
<?xml version="1.0" encoding="UTF-8"?><!--Copyright 2015 MLB Advanced Media, L.P. Use of any content on this page acknowledges agreement to the terms posted here http://gdx.mlb.com/components/copyright.txt-->
<boxscore game_id="2013/04/29/anamlb-oakmlb-1" game_pk="347121" venue_id="10"
venue_name="O.co Coliseum"
home_sport_code="mlb"
away_team_code="ana"
home_team_code="oak"
away_id="108"
home_id="133"
away_fname="Los Angeles Angels"
home_fname="Oakland Athletics"
away_sname="LA Angels"
home_sname="Oakland"
date="April 29, 2013"
away_wins="9"
away_loss="16"
home_wins="15"
home_loss="12"
status_ind="F">
<linescore away_team_runs="8" home_team_runs="10" away_team_hits="15" home_team_hits="15"
away_team_errors="3"
home_team_errors="1"
note="Two out when winning run scored.">
<inning_line_score away="1" home="0" inning="1"/>
<inning_line_score away="1" home="0" inning="2"/>
<inning_line_score away="0" home="0" inning="3"/>
<inning_line_score away="0" home="1" inning="4"/>
<inning_line_score away="4" home="0" inning="5"/>
<inning_line_score away="0" home="1" inning="6"/>
<inning_line_score away="1" home="0" inning="7"/>
<inning_line_score away="0" home="4" inning="8"/>
<inning_line_score away="0" home="1" inning="9"/>
<inning_line_score away="0" home="0" inning="10"/>
<inning_line_score away="0" home="0" inning="11"/>
<inning_line_score away="0" home="0" inning="12"/>
<inning_line_score away="0" home="0" inning="13"/>
<inning_line_score away="0" home="0" inning="14"/>
<inning_line_score away="1" home="1" inning="15"/>
<inning_line_score away="0" home="0" inning="16"/>
<inning_line_score away="0" home="0" inning="17"/>
<inning_line_score away="0" home="0" inning="18"/>
<inning_line_score away="0" home="2" inning="19"/>
</linescore>
<pitching team_flag="away" out="56" h="15" r="10" er="8" bb="6" so="17" hr="2" bf="77"
era="4.65">
<pitcher id="462102" name="Hanson" name_display_first_last="Tommy Hanson" pos="P"
out="18"
bf="24"
er="1"
r="2"
h="4"
so="6"
hr="1"
bb="1"
np="100"
s="65"
w="2"
l="1"
sv="0"
bs="0"
hld="0"
s_ip="23.0"
s_h="24"
s_r="10"
s_er="9"
s_bb="7"
s_so="14"
game_score="63"
era="3.52"/>
<pitcher id="607706" name="Roth" name_display_first_last="Michael Roth" pos="P" out="3"
bf="6"
er="3"
r="3"
h="3"
so="0"
hr="0"
bb="0"
np="23"
s="13"
w="1"
l="1"
sv="0"
bs="0"
hld="0"
s_ip="9.2"
s_h="14"
s_r="10"
s_er="10"
s_bb="3"
s_so="10"
game_score="35"
era="9.31"/>
<pitcher id="451773" name="De La Rosa, D" name_display_first_last="Dane De La Rosa"
pos="P"
out="2"
bf="4"
er="1"
r="1"
h="1"
so="1"
hr="0"
bb="1"
np="11"
s="5"
w="1"
l="0"
sv="0"
bs="0"
hld="3"
s_ip="12.1"
s_h="11"
s_r="4"
s_er="4"
s_bb="5"
s_so="12"
game_score="46"
era="2.92"
note="(H, 3)"/>
<pitcher id="275933" name="Downs, S" name_display_first_last="Scott Downs" pos="P"
out="0"
bf="1"
er="0"
r="0"
h="1"
so="0"
hr="0"
bb="0"
np="4"
s="2"
w="0"
l="2"
sv="0"
bs="2"
hld="4"
s_ip="10.2"
s_h="10"
s_r="4"
s_er="3"
s_bb="5"
s_so="8"
game_score="48"
era="2.53"/>
<pitcher id="457117" name="Frieri" name_display_first_last="Ernesto Frieri" pos="P"
out="4"
bf="6"
er="1"
r="1"
h="1"
so="2"
hr="0"
bb="1"
np="27"
s="17"
w="0"
l="1"
sv="3"
bs="1"
hld="0"
s_ip="10.2"
s_h="7"
s_r="3"
s_er="3"
s_bb="8"
s_so="14"
game_score="49"
era="2.53"
blown_save="true"
note="(BS, 1)">(BS, 1)</pitcher>
<pitcher id="425532" name="Williams" name_display_first_last="Jerome Williams" pos="P"
out="18"
bf="23"
er="0"
r="1"
h="4"
so="2"
hr="0"
bb="2"
np="73"
s="45"
w="1"
l="0"
sv="0"
bs="0"
hld="0"
s_ip="21.1"
s_h="16"
s_r="7"
s_er="4"
s_bb="6"
s_so="11"
game_score="62"
era="1.69"/>
<pitcher id="543409" name="Kohn" name_display_first_last="Michael Kohn" pos="P" out="6"
bf="6"
er="0"
r="0"
h="0"
so="4"
hr="0"
bb="0"
np="31"
s="20"
w="0"
l="0"
sv="0"
bs="0"
hld="0"
s_ip="4.0"
s_h="3"
s_r="2"
s_er="2"
s_bb="0"
s_so="6"
game_score="60"
era="4.50"/>
<pitcher id="446264" name="Enright" name_display_first_last="Barry Enright" pos="P"
out="5"
bf="7"
er="2"
r="2"
h="1"
so="2"
hr="1"
bb="1"
np="28"
s="18"
w="0"
l="1"
sv="0"
bs="0"
hld="0"
s_ip="1.2"
s_h="1"
s_r="2"
s_er="2"
s_bb="1"
s_so="2"
game_score="46"
era="10.80"
loss="true"
note="(L, 0-1)"/>
</pitching>
<batting team_flag="home" ab="71" r="10" h="15" d="1" t="1" hr="2" rbi="9" bb="6"
po="57"
da="19"
so="17"
lob="30"
avg=".251">
<batter id="424825" name="Crisp" name_display_first_last="Coco Crisp" pos="CF" bo="100"
ab="6"
po="2"
r="2"
a="0"
bb="1"
sac="0"
t="0"
sf="0"
h="1"
e="0"
d="0"
hbp="0"
so="1"
hr="0"
rbi="0"
lob="0"
fldg="1.000"
sb="1"
cs="0"
s_hr="5"
s_rbi="12"
s_h="28"
s_bb="17"
s_r="24"
s_so="7"
avg=".283"
go="3"
ao="1"/>
<batter id="474384" name="Freiman" name_display_first_last="Nate Freiman" pos="1B"
bo="101"
ab="2"
po="6"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="1"
fldg="1.000"
sb="0"
cs="0"
s_hr="1"
s_rbi="5"
s_h="4"
s_bb="4"
s_r="2"
s_so="5"
avg=".148"
ao="2"/>
<batter id="452234" name="Smith, S" name_display_first_last="Seth Smith" pos="DH-LF"
bo="200"
ab="8"
po="0"
r="0"
a="0"
bb="1"
sac="0"
t="0"
sf="0"
h="2"
e="0"
d="0"
hbp="0"
so="2"
hr="0"
rbi="0"
lob="1"
fldg=".000"
sb="0"
cs="0"
s_hr="3"
s_rbi="13"
s_h="24"
s_bb="10"
s_r="12"
s_so="18"
avg=".312"
go="1"
ao="3"/>
<batter id="476704" name="Lowrie" name_display_first_last="Jed Lowrie" pos="SS"
bo="300"
ab="9"
po="2"
r="2"
a="6"
bb="0"
sac="0"
t="0"
sf="0"
h="2"
e="1"
d="1"
hbp="0"
so="1"
hr="0"
rbi="1"
lob="3"
fldg=".889"
sb="0"
cs="0"
s_hr="3"
s_rbi="15"
s_h="34"
s_bb="13"
s_r="18"
s_so="19"
avg=".333"
go="3"
ao="3"/>
<batter id="493316" name="Cespedes" name_display_first_last="Yoenis Cespedes"
pos="LF-CF"
bo="400"
ab="8"
po="6"
r="1"
a="0"
bb="1"
sac="0"
t="0"
sf="0"
h="1"
e="0"
d="0"
hbp="0"
so="4"
hr="0"
rbi="1"
lob="3"
fldg="1.000"
sb="0"
cs="0"
s_hr="4"
s_rbi="10"
s_h="11"
s_bb="7"
s_r="10"
s_so="19"
avg=".208"
go="1"
ao="2"/>
<batter id="461235" name="Moss" name_display_first_last="Brandon Moss" pos="1B-RF"
bo="500"
ab="8"
po="10"
r="3"
a="2"
bb="1"
sac="0"
t="0"
sf="0"
h="3"
e="0"
d="0"
hbp="0"
so="4"
hr="2"
rbi="3"
lob="4"
fldg="1.000"
sb="1"
cs="0"
s_hr="4"
s_rbi="19"
s_h="25"
s_bb="13"
s_r="17"
s_so="28"
avg=".298"
go="1"
ao="0"/>
<batter id="518626" name="Donaldson" name_display_first_last="Josh Donaldson" pos="3B"
bo="600"
ab="7"
po="2"
r="1"
a="3"
bb="1"
sac="0"
t="0"
sf="0"
h="3"
e="0"
d="0"
hbp="0"
so="1"
hr="0"
rbi="2"
lob="0"
fldg="1.000"
sb="1"
cs="0"
s_hr="2"
s_rbi="20"
s_h="32"
s_bb="13"
s_r="14"
s_so="13"
avg=".327"
go="1"
ao="2"/>
<batter id="444379" name="Jaso" name_display_first_last="John Jaso" pos="C" bo="700"
ab="2"
po="7"
r="0"
a="1"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="1"
hr="0"
rbi="0"
lob="2"
fldg="1.000"
sb="0"
cs="0"
s_hr="1"
s_rbi="9"
s_h="20"
s_bb="6"
s_r="8"
s_so="16"
avg=".260"
go="1"
ao="0"/>
<batter id="519083" name="Norris, D" name_display_first_last="Derek Norris" pos="PH-C"
bo="701"
ab="5"
po="11"
r="1"
a="0"
bb="1"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="1"
hr="0"
rbi="0"
lob="4"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="4"
s_h="11"
s_bb="11"
s_r="11"
s_so="11"
avg=".256"
note="a-"
go="2"
ao="2"/>
<batter id="502210" name="Reddick" name_display_first_last="Josh Reddick" pos="RF"
bo="800"
ab="3"
po="4"
r="0"
a="1"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="3"
fldg="1.000"
sb="0"
cs="0"
s_hr="1"
s_rbi="13"
s_h="11"
s_bb="9"
s_r="9"
s_so="21"
avg=".147"
go="2"
ao="1"/>
<batter id="455759" name="Young, C" name_display_first_last="Chris Young"
pos="PH-RF-CF"
bo="801"
ab="4"
po="2"
r="0"
a="0"
bb="0"
sac="0"
t="1"
sf="0"
h="2"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="1"
lob="4"
fldg="1.000"
sb="0"
cs="0"
s_hr="4"
s_rbi="15"
s_h="15"
s_bb="12"
s_r="12"
s_so="20"
avg=".172"
note="c-"
go="4"
ao="0"
gidp="2"/>
<batter id="474463" name="Anderson, B" name_display_first_last="Brett Anderson" pos="P"
bo="802"
ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="460283" name="Blevins" name_display_first_last="Jerry Blevins" pos="P"
bo="803"
ab="1"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="1"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="1"
avg=".000"
ao="0"/>
<batter id="519299" name="Sogard" name_display_first_last="Eric Sogard" pos="2B"
bo="900"
ab="2"
po="2"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="1"
hr="0"
rbi="0"
lob="2"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="2"
s_h="15"
s_bb="5"
s_r="8"
s_so="10"
avg=".231"
ao="1"/>
<batter id="489267" name="Rosales" name_display_first_last="Adam Rosales" pos="PH-2B"
bo="901"
ab="6"
po="3"
r="0"
a="5"
bb="0"
sac="0"
t="0"
sf="0"
h="1"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="1"
lob="3"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="1"
s_h="4"
s_bb="0"
s_r="1"
s_so="2"
avg=".250"
note="b-"
go="3"
ao="2"/>
<batter id="573185" name="Straily" name_display_first_last="Dan Straily" pos="P" ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="450212" name="Neshek" name_display_first_last="Pat Neshek" pos="P" ab="0"
po="0"
r="0"
a="1"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="434592" name="Resop" name_display_first_last="Chris Resop" pos="P" ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="448281" name="Doolittle" name_display_first_last="Sean Doolittle" pos="P"
ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="346797" name="Balfour" name_display_first_last="Grant Balfour" pos="P"
ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="475857" name="Cook" name_display_first_last="Ryan Cook" pos="P" ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<note><![CDATA[<span>a-Grounded out for Jaso in the 7th. b-Grounded out for Sogard in the 7th. c-Singled for Reddick in the 8th. </span>]]></note>
<text_data><![CDATA[<b>BATTING</b><br/><span>
<b>2B</b>: Lowrie (11, Williams).</span><br/><span>
<b>3B</b>: Young, C (1, Williams).</span><br/><span>
<b>HR</b>: Moss 2 (4, 6th inning off Hanson, 0 on, 2 out; 19th inning off Enright, 1 on, 2 out).</span><br/><span>
<b>TB</b>: Young, C 4; Donaldson 3; Moss 9; Lowrie 3; Crisp; Cespedes; Smith, S 2; Rosales.</span><br/><span>
<b>RBI</b>: Moss 3 (19), Lowrie (15), Donaldson 2 (20), Young, C (15), Cespedes (10), Rosales (1).</span><br/><span>
<b>2-out RBI</b>: Moss 3; Young, C; Cespedes; Rosales.</span><br/><span>
<b>Runners left in scoring position, 2 out</b>: Cespedes 2; Sogard; Rosales 2; Freiman.</span><br/><span>
<b>GIDP</b>: Young, C 2.</span><br/><b>Team RISP</b>: 5-for-20.<br/><b>Team LOB</b>: 11.<br/><br/><b>BASERUNNING</b><br/><span>
<b>SB</b>: Moss (1, 2nd base off Hanson/Iannetta), Donaldson (2, 2nd base off Hanson/Iannetta), Crisp (8, 3rd base off Frieri/Iannetta).</span><br/><br/><b>FIELDING</b><br/><span>
<b>E</b>: Lowrie (5, fielding).</span><br/><span>
<b>Outfield assists</b>: Reddick (Kendrick, H at 2nd base), Moss (Shuck at 2nd base).</span><br/><br/>]]></text_data>
<note_es><![CDATA[<span>a-Bateó por Jaso en la 7th. b-Bateó por Sogard en la 7th. c-Bateó por Reddick en la 8th. </span>]]></note_es>
<text_data_es><![CDATA[<b>Bateo</b><br/><span>
<b>2B</b>: Lowrie (11, Williams).</span><br/><span>
<b>3B</b>: Young, C (1, Williams).</span><br/><span>
<b>HR</b>: Moss 2 (4, 6th entrada ante Hanson, 0 en base, 2 out; 19th entrada ante Enright, 1 en base, 2 out).</span><br/><span>
<b>BA</b>: Young, C 4; Donaldson 3; Moss 9; Lowrie 3; Crisp; Cespedes; Smith, S 2; Rosales.</span><br/><span>
<b>RBI</b>: Moss 3 (19), Lowrie (15), Donaldson 2 (20), Young, C (15), Cespedes (10), Rosales (1).</span><br/><span>
<b>2-out RBI</b>: Moss 3; Young, C; Cespedes; Rosales.</span><br/><span>
<b>Corredores dejados en circulación, 2 out</b>: Cespedes 2; Sogard; Rosales 2; Freiman.</span><br/><span>
<b>RDP</b>: Young, C 2.</span><br/><b>Equipo con Corredores en Posición de Anotar</b>:
de 5-20.<br/><b>Equipo con Corredores Dejados en Circulación</b>: 11.<br/><br/><b>Corrido de Bases</b><br/><span>
<b>SB</b>: Moss (1, 2nd base a Hanson/Iannetta), Donaldson (2, 2nd base a Hanson/Iannetta), Crisp (8, 3rd base a Frieri/Iannetta).</span><br/><br/><b>Defensa</b><br/><span>
<b>E</b>: Lowrie (5, fielding).</span><br/><span>
<b>Asistencias Desde los Jardines</b>: Reddick (Kendrick, H en 2nd base), Moss (Shuck en 2nd base).</span><br/><br/>]]></text_data_es>
</batting>
<pitching team_flag="home" out="57" h="15" r="8" er="8" bb="6" so="18" hr="3" bf="79"
era="4.28">
<pitcher id="573185" name="Straily" name_display_first_last="Dan Straily" pos="P"
out="14"
bf="23"
er="6"
r="6"
h="7"
so="6"
hr="2"
bb="1"
np="88"
s="55"
w="1"
l="0"
sv="0"
bs="0"
hld="0"
s_ip="11.1"
s_h="12"
s_r="8"
s_er="8"
s_bb="1"
s_so="17"
game_score="31"
era="6.35"/>
<pitcher id="450212" name="Neshek" name_display_first_last="Pat Neshek" pos="P" out="7"
bf="9"
er="1"
r="1"
h="1"
so="2"
hr="1"
bb="1"
np="28"
s="19"
w="0"
l="0"
sv="0"
bs="0"
hld="0"
s_ip="11.1"
s_h="13"
s_r="6"
s_er="4"
s_bb="7"
s_so="8"
game_score="52"
era="3.18"/>
<pitcher id="434592" name="Resop" name_display_first_last="Chris Resop" pos="P" out="5"
bf="7"
er="0"
r="0"
h="1"
so="1"
hr="0"
bb="1"
np="34"
s="19"
w="1"
l="0"
sv="0"
bs="0"
hld="0"
s_ip="12.2"
s_h="16"
s_r="7"
s_er="6"
s_bb="7"
s_so="8"
game_score="53"
era="4.26"/>
<pitcher id="448281" name="Doolittle" name_display_first_last="Sean Doolittle" pos="P"
out="1"
bf="1"
er="0"
r="0"
h="0"
so="1"
hr="0"
bb="0"
np="4"
s="3"
w="1"
l="0"
sv="0"
bs="1"
hld="2"
s_ip="10.0"
s_h="5"
s_r="2"
s_er="2"
s_bb="3"
s_so="10"
game_score="52"
era="1.80"/>
<pitcher id="346797" name="Balfour" name_display_first_last="Grant Balfour" pos="P"
out="6"
bf="9"
er="0"
r="0"
h="2"
so="2"
hr="0"
bb="1"
np="31"
s="20"
w="0"
l="0"
sv="3"
bs="0"
hld="0"
s_ip="11.0"
s_h="10"
s_r="3"
s_er="3"
s_bb="4"
s_so="10"
game_score="53"
era="2.45"/>
<pitcher id="475857" name="Cook" name_display_first_last="Ryan Cook" pos="P" out="3"
bf="3"
er="0"
r="0"
h="0"
so="1"
hr="0"
bb="0"
np="11"
s="8"
w="1"
l="0"
sv="0"
bs="0"
hld="3"
s_ip="13.0"
s_h="7"
s_r="4"
s_er="3"
s_bb="6"
s_so="14"
game_score="54"
era="2.08"/>
<pitcher id="474463" name="Anderson, B" name_display_first_last="Brett Anderson" pos="P"
out="16"
bf="21"
er="1"
r="1"
h="3"
so="5"
hr="0"
bb="2"
np="79"
s="49"
w="1"
l="4"
sv="0"
bs="0"
hld="0"
s_ip="29.0"
s_h="32"
s_r="22"
s_er="20"
s_bb="15"
s_so="29"
game_score="61"
era="6.21"/>
<pitcher id="460283" name="Blevins" name_display_first_last="Jerry Blevins" pos="P"
out="5"
bf="6"
er="0"
r="0"
h="1"
so="0"
hr="0"
bb="0"
np="25"
s="15"
w="2"
l="0"
sv="0"
bs="1"
hld="0"
s_ip="16.1"
s_h="10"
s_r="3"
s_er="3"
s_bb="1"
s_so="16"
game_score="53"
era="1.65"
win="true"
note="(W, 2-0)"/>
</pitching>
<batting team_flag="away" ab="70" r="8" h="15" d="2" t="0" hr="3" rbi="8" bb="6" po="56"
da="20"
so="18"
lob="22"
avg=".263">
<batter id="488721" name="Bourjos" name_display_first_last="Peter Bourjos" pos="CF"
bo="100"
ab="4"
po="1"
r="1"
a="0"
bb="0"
sac="1"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="1"
so="2"
hr="0"
rbi="0"
lob="1"
fldg="1.000"
sb="0"
cs="0"
s_hr="2"
s_rbi="8"
s_h="26"
s_bb="4"
s_r="12"
s_so="20"
avg=".313"
go="1"
ao="2"/>
<batter id="543776" name="Shuck" name_display_first_last="J.B. Shuck" pos="LF" bo="101"
ab="2"
po="0"
r="0"
a="0"
bb="1"
sac="0"
t="0"
sf="0"
h="1"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="1"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="2"
s_h="6"
s_bb="2"
s_r="1"
s_so="3"
avg=".429"
go="1"
ao="0"/>
<batter id="545361" name="Trout" name_display_first_last="Mike Trout" pos="LF-CF"
bo="200"
ab="8"
po="7"
r="1"
a="0"
bb="1"
sac="0"
t="0"
sf="0"
h="1"
e="0"
d="0"
hbp="0"
so="2"
hr="0"
rbi="0"
lob="5"
fldg="1.000"
sb="0"
cs="0"
s_hr="2"
s_rbi="12"
s_h="27"
s_bb="12"
s_r="14"
s_so="25"
avg=".252"
go="3"
ao="2"/>
<batter id="405395" name="Pujols" name_display_first_last="Albert Pujols" pos="1B"
bo="300"
ab="8"
po="15"
r="3"
a="4"
bb="1"
sac="0"
t="0"
sf="0"
h="4"
e="1"
d="0"
hbp="0"
so="0"
hr="2"
rbi="3"
lob="1"
fldg=".950"
sb="0"
cs="0"
s_hr="4"
s_rbi="16"
s_h="26"
s_bb="15"
s_r="12"
s_so="12"
avg=".265"
go="1"
ao="3"/>
<batter id="285078" name="Hamilton" name_display_first_last="Josh Hamilton" pos="RF"
bo="400"
ab="8"
po="7"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="1"
h="0"
e="0"
d="0"
hbp="0"
so="3"
hr="0"
rbi="1"
lob="4"
fldg="1.000"
sb="0"
cs="0"
s_hr="2"
s_rbi="9"
s_h="21"
s_bb="5"
s_r="10"
s_so="32"
avg=".202"
go="1"
ao="5"/>
<batter id="444432" name="Trumbo" name_display_first_last="Mark Trumbo" pos="DH"
bo="500"
ab="8"
po="0"
r="1"
a="0"
bb="1"
sac="0"
t="0"
sf="0"
h="3"
e="0"
d="1"
hbp="0"
so="2"
hr="1"
rbi="3"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="4"
s_rbi="15"
s_h="32"
s_bb="8"
s_r="12"
s_so="28"
avg=".302"
go="3"
ao="0"/>
<batter id="435062" name="Kendrick, H" name_display_first_last="Howie Kendrick"
pos="2B"
bo="600"
ab="9"
po="4"
r="0"
a="5"
bb="0"
sac="0"
t="0"
sf="0"
h="2"
e="1"
d="0"
hbp="0"
so="2"
hr="0"
rbi="0"
lob="3"
fldg=".900"
sb="0"
cs="0"
s_hr="3"
s_rbi="14"
s_h="28"
s_bb="6"
s_r="8"
s_so="17"
avg=".283"
go="3"
ao="2"/>
<batter id="430593" name="Harris, B" name_display_first_last="Brendan Harris"
pos="SS-3B"
bo="700"
ab="9"
po="2"
r="1"
a="6"
bb="0"
sac="0"
t="0"
sf="0"
h="2"
e="1"
d="1"
hbp="0"
so="2"
hr="0"
rbi="0"
lob="3"
fldg=".889"
sb="0"
cs="0"
s_hr="1"
s_rbi="4"
s_h="14"
s_bb="2"
s_r="6"
s_so="12"
avg=".264"
go="3"
ao="2"/>
<batter id="455104" name="Iannetta" name_display_first_last="Chris Iannetta" pos="C"
bo="800"
ab="6"
po="18"
r="0"
a="0"
bb="2"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="2"
hr="0"
rbi="0"
lob="1"
fldg="1.000"
sb="0"
cs="0"
s_hr="3"
s_rbi="9"
s_h="16"
s_bb="11"
s_r="11"
s_so="19"
avg=".225"
ao="4"/>
<batter id="499864" name="Jimenez, L" name_display_first_last="Luis Jimenez" pos="3B"
bo="900"
ab="3"
po="0"
r="1"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="1"
e="0"
d="0"
hbp="0"
so="1"
hr="0"
rbi="0"
lob="1"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="2"
s_h="15"
s_bb="2"
s_r="10"
s_so="20"
avg=".273"
ao="1"/>
<batter id="461865" name="Romine, A" name_display_first_last="Andrew Romine" pos="SS"
bo="901"
ab="5"
po="0"
r="0"
a="3"
bb="0"
sac="0"
t="0"
sf="0"
h="1"
e="0"
d="0"
hbp="0"
so="2"
hr="0"
rbi="0"
lob="3"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="1"
s_h="3"
s_bb="1"
s_r="1"
s_so="6"
avg=".130"
go="1"
ao="1"/>
<batter id="462102" name="Hanson" name_display_first_last="Tommy Hanson" pos="P" ab="0"
po="2"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="607706" name="Roth" name_display_first_last="Michael Roth" pos="P" ab="0"
po="0"
r="0"
a="1"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="451773" name="De La Rosa, D" name_display_first_last="Dane De La Rosa"
pos="P"
ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="275933" name="Downs, S" name_display_first_last="Scott Downs" pos="P"
ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="457117" name="Frieri" name_display_first_last="Ernesto Frieri" pos="P"
ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="425532" name="Williams" name_display_first_last="Jerome Williams" pos="P"
ab="0"
po="0"
r="0"
a="1"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg="1.000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="543409" name="Kohn" name_display_first_last="Michael Kohn" pos="P" ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<batter id="446264" name="Enright" name_display_first_last="Barry Enright" pos="P"
ab="0"
po="0"
r="0"
a="0"
bb="0"
sac="0"
t="0"
sf="0"
h="0"
e="0"
d="0"
hbp="0"
so="0"
hr="0"
rbi="0"
lob="0"
fldg=".000"
sb="0"
cs="0"
s_hr="0"
s_rbi="0"
s_h="0"
s_bb="0"
s_r="0"
s_so="0"
avg=".000"
ao="0"/>
<text_data><![CDATA[<b>BATTING</b><br/><span>
<b>2B</b>: Trumbo (8, Straily), Harris, B (4, Anderson, B).</span><br/><span>
<b>HR</b>: Pujols 2 (4, 1st inning off Straily, 0 on, 2 out; 7th inning off Neshek, 0 on, 1 out), Trumbo (4, 2nd inning off Straily, 0 on, 0 out).</span><br/><span>
<b>TB</b>: Shuck; Trout; Harris, B 3; Kendrick, H 2; Jimenez, L; Trumbo 7; Romine, A; Pujols 10.</span><br/><span>
<b>RBI</b>: Pujols 3 (16), Trumbo 3 (15), Hamilton (9), Shuck (2).</span><br/><span>
<b>2-out RBI</b>: Pujols; Trumbo 2; Shuck.</span><br/><span>
<b>Runners left in scoring position, 2 out</b>: Harris, B; Kendrick, H; Hamilton 2; Trout 2.</span><br/><span>
<b>SAC</b>: Bourjos.</span><br/><span>
<b>SF</b>: Hamilton.</span><br/><b>Team RISP</b>: 3-for-10.<br/><b>Team LOB</b>: 14.<br/><br/><b>FIELDING</b><br/><span>
<b>E</b>: Harris, B (2, interference), Kendrick, H (5, fielding), Pujols (2, missed catch).</span><br/><span>
<b>PB</b>: Iannetta (2).</span><br/><span>
<b>DP</b>: 2 (Romine, A-Kendrick, H-Pujols, Harris, B-Pujols).</span><br/><br/>]]></text_data>
<text_data_es><![CDATA[<b>Bateo</b><br/><span>
<b>2B</b>: Trumbo (8, Straily), Harris, B (4, Anderson, B).</span><br/><span>
<b>HR</b>: Pujols 2 (4, 1st entrada ante Straily, 0 en base, 2 out; 7th entrada ante Neshek, 0 en base, 1 out), Trumbo (4, 2nd entrada ante Straily, 0 en base, 0 out).</span><br/><span>
<b>BA</b>: Shuck; Trout; Harris, B 3; Kendrick, H 2; Jimenez, L; Trumbo 7; Romine, A; Pujols 10.</span><br/><span>
<b>RBI</b>: Pujols 3 (16), Trumbo 3 (15), Hamilton (9), Shuck (2).</span><br/><span>
<b>2-out RBI</b>: Pujols; Trumbo 2; Shuck.</span><br/><span>
<b>Corredores dejados en circulación, 2 out</b>: Harris, B; Kendrick, H; Hamilton 2; Trout 2.</span><br/><span>
<b>SAC</b>: Bourjos.</span><br/><span>
<b>SF</b>: Hamilton.</span><br/><b>Equipo con Corredores en Posición de Anotar</b>:
de 3-10.<br/><b>Equipo con Corredores Dejados en Circulación</b>: 14.<br/><br/><b>Defensa</b><br/><span>
<b>E</b>: Harris, B (2, interference), Kendrick, H (5, fielding), Pujols (2, missed catch).</span><br/><span>
<b>PB</b>: Iannetta (2).</span><br/><span>
<b>DP</b>: 2 (Romine, A-Kendrick, H-Pujols, Harris, B-Pujols).</span><br/><br/>]]></text_data_es>
</batting>
<game_info><![CDATA[<span>Roth pitched to 3 batters in the 8th.</span><br/><span>Downs, S pitched to 1 batter in the 8th.</span><br/><br/><span>
<b>Game Scores</b>: Hanson 63, Straily 31.</span><br/><span>
<b>IBB</b>: Pujols (by Balfour), Iannetta (by Anderson, B).</span><br/><span>
<b>HBP</b>: Bourjos (by Straily).</span><br/><span>
<b>Pitches-strikes</b>: Hanson 100-65, Roth 23-13, De La Rosa, D 11-5, Downs, S 4-2, Frieri 27-17, Williams 73-45, Kohn 31-20, Enright 28-18, Straily 88-55, Neshek 28-19, Resop 34-19, Doolittle 4-3, Balfour 31-20, Cook 11-8, Anderson, B 79-49, Blevins 25-15.</span><br/><span>
<b>Groundouts-flyouts</b>: Hanson 7-2, Roth 3-0, De La Rosa, D 1-0, Downs, S 0-0, Frieri 0-2, Williams 7-5, Kohn 1-1, Enright 1-1, Straily 3-3, Neshek 1-0, Resop 1-1, Doolittle 0-0, Balfour 1-2, Cook 2-0, Anderson, B 7-4, Blevins 2-2.</span><br/><span>
<b>Batters faced</b>: Hanson 24, Roth 6, De La Rosa, D 4, Downs, S 1, Frieri 6, Williams 23, Kohn 6, Enright 7, Straily 23, Neshek 9, Resop 7, Doolittle 1, Balfour 9, Cook 3, Anderson, B 21, Blevins 6.</span><br/><span>
<b>Inherited runners-scored</b>: De La Rosa, D 2-1, Downs, S 2-1, Frieri 2-0, Neshek 1-0, Doolittle 2-0, Blevins 1-0.</span><br/><b>Umpires</b>: HP: Kerwin Danley. 1B: Vic Carapazza. 2B: Gary Cederstrom. 3B: Lance Barksdale. <br/><b>Weather</b>: 71 degrees, partly cloudy.<br/><b>Wind</b>: 16 mph, Out to RF.<br/><b>T</b>: 6:32.<br/><b>Att</b>: 11,668.<br/><b>Venue</b>: O.co Coliseum.<br/><b>April 29, 2013</b><br/>]]></game_info>
<game_info_es><![CDATA[<span>Roth enfrentó 3 bateadores en la 8th.</span><br/><span>Downs, S enfrentó 1 bateador en la 8th.</span><br/><br/><span>
<b>Anotaciones del Juego</b>: Hanson 63, Straily 31.</span><br/><span>
<b>BBI</b>: Pujols (por Balfour), Iannetta (por Anderson, B).</span><br/><span>
<b>BG</b>: Bourjos (por Straily).</span><br/><span>
<b>Lanzamientos-strikes</b>: Hanson 100-65, Roth 23-13, De La Rosa, D 11-5, Downs, S 4-2, Frieri 27-17, Williams 73-45, Kohn 31-20, Enright 28-18, Straily 88-55, Neshek 28-19, Resop 34-19, Doolittle 4-3, Balfour 31-20, Cook 11-8, Anderson, B 79-49, Blevins 25-15.</span><br/><span>
<b>Roletazos-elevados de out</b>: Hanson 7-2, Roth 3-0, De La Rosa, D 1-0, Downs, S 0-0, Frieri 0-2, Williams 7-5, Kohn 1-1, Enright 1-1, Straily 3-3, Neshek 1-0, Resop 1-1, Doolittle 0-0, Balfour 1-2, Cook 2-0, Anderson, B 7-4, Blevins 2-2.</span><br/><span>
<b>Bateadores enfrentados</b>: Hanson 24, Roth 6, De La Rosa, D 4, Downs, S 1, Frieri 6, Williams 23, Kohn 6, Enright 7, Straily 23, Neshek 9, Resop 7, Doolittle 1, Balfour 9, Cook 3, Anderson, B 21, Blevins 6.</span><br/><span>
<b>Corredores Heredados que Anotaron</b>: De La Rosa, D 2-1, Downs, S 2-1, Frieri 2-0, Neshek 1-0, Doolittle 2-0, Blevins 1-0.</span><br/><b>Árbitros</b>: HP: Kerwin Danley. 1B: Vic Carapazza. 2B: Gary Cederstrom. 3B: Lance Barksdale. <br/><b>Clima</b>: 71 degrees, partly cloudy.<br/><b>Viento</b>: 16 mph, Out to RF.<br/><b>T</b>: 6:32.<br/><b>Att</b>: 11,668.<br/><b>Estadio</b>: O.co Coliseum.<br/><b>April 29, 2013</b><br/>]]></game_info_es>
</boxscore>
"""
# MLB GameDay game.xml fixture for the 2013-04-29 LAA @ OAK game: game
# metadata, both team records, and the stadium. Parsed in setUp to build
# the shared Game object.
XML_GAME = """
<!--Copyright 2015 MLB Advanced Media, L.P. Use of any content on this page acknowledges agreement to the terms posted here http://gdx.mlb.com/components/copyright.txt--><game type="R" local_game_time="19:05" game_pk="347121" game_time_et="10:05 PM" gameday_sw="P">
<team type="home" code="oak" file_code="oak" abbrev="OAK" id="133" name="Oakland" name_full="Oakland Athletics" name_brief="Athletics" w="15" l="12" division_id="200" league_id="103" league="AL"/>
<team type="away" code="ana" file_code="ana" abbrev="LAA" id="108" name="LA Angels" name_full="Los Angeles Angels" name_brief="Angels" w="9" l="16" division_id="200" league_id="103" league="AL"/>
<stadium id="10" name="O.co Coliseum" venue_w_chan_loc="USCA0791" location="Oakland, CA"/>
</game>
"""
# MLB GameDay players.xml fixture (full rosters, coaches, and umpires) for
# the same 2013-04-29 LAA @ OAK game; consumed in setUp via
# Players._read_objects to build the shared Players object.
XML_PLAYERS = """
<!--Copyright 2015 MLB Advanced Media, L.P. Use of any content on this page acknowledges agreement to the terms posted here http://gdx.mlb.com/components/copyright.txt--><game venue="O.co Coliseum" date="April 29, 2013">
<team type="away" id="LAA" name="Los Angeles Angels">
<player id="275933" first="Scott" last="Downs" num="37" boxname="Downs, S" rl="L" bats="L" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="2" era="2.53"/>
<player id="285078" first="Josh" last="Hamilton" num="32" boxname="Hamilton" rl="L" bats="L" position="LF" current_position="RF" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="4" game_position="RF" avg=".219" hr="2" rbi="8"/>
<player id="405395" first="Albert" last="Pujols" num="5" boxname="Pujols" rl="R" bats="R" position="1B" current_position="1B" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="3" game_position="1B" avg=".244" hr="2" rbi="13"/>
<player id="425492" first="Ryan" last="Madson" num="46" boxname="Madson" rl="R" bats="L" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="-"/>
<player id="425532" first="Jerome" last="Williams" num="57" boxname="Williams" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="2.35"/>
<player id="430593" first="Brendan" last="Harris" num="20" boxname="Harris, B" rl="R" bats="R" position="SS" current_position="3B" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="7" game_position="SS" avg=".273" hr="1" rbi="4"/>
<player id="430599" first="Joe" last="Blanton" num="55" boxname="Blanton" rl="R" bats="R" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="4" era="7.09"/>
<player id="430634" first="Sean" last="Burnett" num="24" boxname="Burnett, S" rl="L" bats="L" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="1.04"/>
<player id="430947" first="Erick" last="Aybar" num="2" boxname="Aybar" rl="R" bats="S" position="SS" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".321" hr="0" rbi="1"/>
<player id="430948" first="Alberto" last="Callaspo" num="6" boxname="Callaspo" rl="R" bats="S" position="3B" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".273" hr="1" rbi="3"/>
<player id="435062" first="Howie" last="Kendrick" num="47" boxname="Kendrick, H" rl="R" bats="R" position="2B" current_position="2B" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="6" game_position="2B" avg=".289" hr="3" rbi="14"/>
<player id="444432" first="Mark" last="Trumbo" num="44" boxname="Trumbo" rl="R" bats="R" position="1B" current_position="DH" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="5" game_position="DH" avg=".296" hr="3" rbi="12"/>
<player id="446264" first="Barry" last="Enright" num="45" boxname="Enright" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="-"/>
<player id="448178" first="Kevin" last="Jepsen" num="40" boxname="Jepsen" rl="R" bats="R" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="1" era="9.82"/>
<player id="450275" first="Mark" last="Lowe" num="38" boxname="Lowe, M" rl="R" bats="L" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="11.37"/>
<player id="450306" first="Jason" last="Vargas" num="60" boxname="Vargas" rl="L" bats="L" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="3" era="4.85"/>
<player id="450308" first="Jered" last="Weaver" num="36" boxname="Weaver" rl="R" bats="R" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="1" era="4.91"/>
<player id="450351" first="C.J." last="Wilson" num="33" boxname="Wilson, C" rl="L" bats="L" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="2" losses="0" era="4.30"/>
<player id="451773" first="Dane" last="De La Rosa" num="65" boxname="De La Rosa, D" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="2.31"/>
<player id="455104" first="Chris" last="Iannetta" num="17" boxname="Iannetta" rl="R" bats="R" position="C" current_position="C" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="8" game_position="C" avg=".246" hr="3" rbi="9"/>
<player id="457117" first="Ernesto" last="Frieri" num="49" boxname="Frieri" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="1" era="1.93"/>
<player id="461865" first="Andrew" last="Romine" num="7" boxname="Romine, A" rl="R" bats="S" position="SS" current_position="SS" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".111" hr="0" rbi="1"/>
<player id="462102" first="Tommy" last="Hanson" num="48" boxname="Hanson" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="0" game_position="P" avg=".000" hr="0" rbi="0" wins="2" losses="1" era="4.24"/>
<player id="474233" first="Hank" last="Conger" num="16" boxname="Conger" rl="R" bats="S" position="C" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".227" hr="1" rbi="3"/>
<player id="476531" first="Andrew" last="Taylor" num="64" boxname="Taylor, A" rl="L" bats="R" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="-"/>
<player id="488721" first="Peter" last="Bourjos" num="25" boxname="Bourjos" rl="R" bats="R" position="CF" current_position="CF" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="1" game_position="CF" avg=".329" hr="2" rbi="8"/>
<player id="499864" first="Luis" last="Jimenez" num="18" boxname="Jimenez, L" rl="R" bats="R" position="3B" current_position="3B" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="9" game_position="3B" avg=".269" hr="0" rbi="2"/>
<player id="543409" first="Michael" last="Kohn" num="58" boxname="Kohn" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="9.00"/>
<player id="543488" first="Nick" last="Maronde" num="63" boxname="Maronde" rl="L" bats="S" position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="3.86"/>
<player id="543776" first="J.B." last="Shuck" num="39" boxname="Shuck" rl="L" bats="L" position="LF" current_position="LF" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".417" hr="0" rbi="1"/>
<player id="545361" first="Mike" last="Trout" num="27" boxname="Trout" rl="R" bats="R" position="CF" current_position="CF" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" bat_order="2" game_position="LF" avg=".263" hr="2" rbi="12"/>
<player id="607706" first="Michael" last="Roth" num="51" boxname="Roth" rl="L" bats="L" position="P" current_position="P" status="A" team_abbrev="LAA" team_id="108" parent_team_abbrev="LAA" parent_team_id="108" avg=".000" hr="0" rbi="0" wins="1" losses="1" era="7.27"/>
<coach position="manager" first="Mike" last="Scioscia" id="121919" num="14"/><coach position="batting_coach" first="Jim" last="Eppard" id="113867" num="80"/><coach position="pitching_coach" first="Mike" last="Butcher" id="111807" num="23"/><coach position="first_base_coach" first="Alfredo" last="Griffin" id="115137" num="4"/><coach position="third_base_coach" first="Dino" last="Ebel" id="492822" num="21"/><coach position="bench_coach" first="Rob" last="Picciolo" id="120537" num="9"/><coach position="infield_coach" first="Bobby" last="Knoop" id="117202" num="1"/><coach position="bullpen_coach" first="Steve" last="Soliz" id="150354" num="61"/><coach position="bullpen_catcher" first="Tom" last="Gregorio" id="408054" num="70"/><coach position="catching_coach" first="Bill" last="Lachemann" id="628306" num=""/><coach position="coach" first="Shayne" last="Kelley" id="648317" num=""/> </team>
<team type="home" id="OAK" name="Oakland Athletics">
<player id="112526" first="Bartolo" last="Colon" num="40" boxname="Colon" rl="R" bats="R" position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="3" losses="0" era="3.38"/>
<player id="346797" first="Grant" last="Balfour" num="50" boxname="Balfour" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="3.00"/>
<player id="424825" first="Coco" last="Crisp" num="4" boxname="Crisp" rl="R" bats="S" position="CF" current_position="CF" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="1" game_position="CF" avg=".290" hr="5" rbi="12"/>
<player id="434592" first="Chris" last="Resop" num="44" boxname="Resop" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="4.91"/>
<player id="444379" first="John" last="Jaso" num="5" boxname="Jaso" rl="R" bats="L" position="C" current_position="C" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="7" game_position="C" avg=".267" hr="1" rbi="9"/>
<player id="448281" first="Sean" last="Doolittle" num="62" boxname="Doolittle" rl="L" bats="L" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="1.86"/>
<player id="450212" first="Pat" last="Neshek" num="47" boxname="Neshek" rl="R" bats="S" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="3.00"/>
<player id="451775" first="Fernando" last="Rodriguez" num="33" boxname="Rodriguez, Fe" rl="R" bats="R" position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="0" losses="0" era="-"/>
<player id="452234" first="Seth" last="Smith" num="15" boxname="Smith, S" rl="L" bats="L" position="LF" current_position="LF" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="2" game_position="DH" avg=".319" hr="3" rbi="13"/>
<player id="455759" first="Chris" last="Young" num="25" boxname="Young, C" rl="R" bats="R" position="CF" current_position="CF" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".157" hr="4" rbi="14"/>
<player id="456167" first="A.J." last="Griffin" num="64" boxname="Griffin" rl="R" bats="R" position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="2" losses="2" era="4.65"/>
<player id="460283" first="Jerry" last="Blevins" num="13" boxname="Blevins" rl="L" bats="L" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="1.84"/>
<player id="461235" first="Brandon" last="Moss" num="37" boxname="Moss" rl="R" bats="L" position="1B" current_position="RF" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="5" game_position="1B" avg=".289" hr="2" rbi="16"/>
<player id="474384" first="Nate" last="Freiman" num="7" boxname="Freiman" rl="R" bats="R" position="1B" current_position="1B" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".160" hr="1" rbi="5"/>
<player id="474463" first="Brett" last="Anderson" num="49" boxname="Anderson, B" rl="L" bats="L" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="1" losses="4" era="7.23"/>
<player id="475857" first="Ryan" last="Cook" num="48" boxname="Cook" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="2.25"/>
<player id="476704" first="Jed" last="Lowrie" num="8" boxname="Lowrie" rl="R" bats="S" position="SS" current_position="SS" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="3" game_position="SS" avg=".344" hr="3" rbi="14"/>
<player id="489267" first="Adam" last="Rosales" num="17" boxname="Rosales" rl="R" bats="R" position="SS" current_position="2B" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".300" hr="0" rbi="0"/>
<player id="493141" first="Hiroyuki" last="Nakajima" num="3" boxname="Nakajima" rl="R" bats="R" position="SS" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0"/>
<player id="493316" first="Yoenis" last="Cespedes" num="52" boxname="Cespedes" rl="R" bats="R" position="LF" current_position="CF" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="4" game_position="LF" avg=".222" hr="4" rbi="9"/>
<player id="502003" first="Scott" last="Sizemore" num="29" boxname="Sizemore" rl="R" bats="R" position="2B" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".167" hr="0" rbi="0"/>
<player id="502210" first="Josh" last="Reddick" num="16" boxname="Reddick" rl="R" bats="L" position="RF" current_position="RF" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="8" game_position="RF" avg=".153" hr="1" rbi="13"/>
<player id="518626" first="Josh" last="Donaldson" num="20" boxname="Donaldson" rl="R" bats="R" position="3B" current_position="3B" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="6" game_position="3B" avg=".319" hr="2" rbi="18"/>
<player id="519083" first="Derek" last="Norris" num="36" boxname="Norris, D" rl="R" bats="R" position="C" current_position="C" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".289" hr="0" rbi="4"/>
<player id="519105" first="Jarrod" last="Parker" num="11" boxname="Parker" rl="R" bats="R" position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="0" losses="4" era="8.10"/>
<player id="519299" first="Eric" last="Sogard" num="28" boxname="Sogard" rl="R" bats="L" position="2B" current_position="2B" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="9" game_position="2B" avg=".238" hr="0" rbi="2"/>
<player id="543548" first="Tommy" last="Milone" num="57" boxname="Milone" rl="L" bats="L" position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" avg=".000" hr="0" rbi="0" wins="3" losses="2" era="3.38"/>
<player id="573185" first="Dan" last="Straily" num="67" boxname="Straily" rl="R" bats="R" position="P" current_position="P" status="A" team_abbrev="OAK" team_id="133" parent_team_abbrev="OAK" parent_team_id="133" bat_order="0" game_position="P" avg=".000" hr="0" rbi="0" wins="1" losses="0" era="2.70"/>
<coach position="manager" first="Bob" last="Melvin" id="118942" num="6"/><coach position="hitting_coach" first="Chili" last="Davis" id="113099" num="30"/><coach position="pitching_coach" first="Curt" last="Young" id="124689" num="41"/><coach position="first_base_coach" first="Tye" last="Waller" id="123868" num="46"/><coach position="third_base_coach" first="Mike" last="Gallego" id="114545" num="2"/><coach position="bench_coach" first="Chip" last="Hale" id="115330" num="14"/><coach position="bullpen_coach" first="Darren" last="Bush" id="470252" num="51"/><coach position="coach" first="Ariel" last="Prieto" id="120768" num="59"/> </team>
<umpires><umpire position="home" name="Kerwin Danley" id="427095" first="Kerwin" last="Danley"/><umpire position="first" name="Vic Carapazza" id="483569" first="Vic" last="Carapazza"/><umpire position="second" name="Gary Cederstrom" id="427058" first="Gary" last="Cederstrom"/><umpire position="third" name="Lance Barksdale" id="427013" first="Lance" last="Barksdale"/></umpires></game>
"""
def setUp(self):
    """Build the shared Game and Players fixtures from the embedded XML."""
    game_soup = BeautifulSoup(TestBoxScore.XML_GAME, 'lxml')
    game_day = dt.strptime('2013-04-29', '%Y-%m-%d')
    self.game = Game._generate_game_object(game_soup, game_day, 1)
    players_soup = BeautifulSoup(TestBoxScore.XML_PLAYERS, 'lxml')
    self.players = Players._read_objects(players_soup, self.game)
def tearDown(self):
    """No per-test cleanup needed; fixtures are rebuilt in setUp each time."""
    pass
def test_generate_object(self):
    """
    Box Score Data Test
    """
    soup = BeautifulSoup(TestBoxScore.XML_BOXSCORE, 'lxml')
    box = BoxScore._generate_object(soup, self.game, self.players)
    self.assertEqual(box.retro_game_id, 'OAK201304290')
    self.assertEqual(box.home_team_id, 'oak')
    self.assertEqual(box.away_team_id, 'ana')
    # (collection attribute, expected entry count) for both rosters
    expected_sizes = (
        ('home_batting', 21),
        ('away_batting', 19),
        ('home_pitching', 8),
        ('away_pitching', 8),
    )
    for attr, size in expected_sizes:
        self.assertEqual(len(getattr(box, attr)), size)
def test_row(self):
    """
    Box Score Row Data Test
    """
    soup = BeautifulSoup(TestBoxScore.XML_BOXSCORE, 'lxml')
    box = BoxScore._generate_object(soup, self.game, self.players)
    row = box.row()
    self.assertEqual(len(row), 61)
    self.assertEqual(row['retro_game_id'], 'OAK201304290')
    self.assertEqual(row['home_team_id'], 'oak')
    self.assertEqual(row['away_team_id'], 'ana')
    # Expected (id, box name, position) per batting-order slot, 1 through 9.
    home_lineup = [
        ('424825', 'Crisp', 'CF'),
        ('452234', 'Smith, S', 'DH-LF'),
        ('476704', 'Lowrie', 'SS'),
        ('493316', 'Cespedes', 'LF-CF'),
        ('461235', 'Moss', '1B-RF'),
        ('518626', 'Donaldson', '3B'),
        ('444379', 'Jaso', 'C'),
        ('502210', 'Reddick', 'RF'),
        ('519299', 'Sogard', '2B'),
    ]
    away_lineup = [
        ('488721', 'Bourjos', 'CF'),
        ('545361', 'Trout', 'LF-CF'),
        ('405395', 'Pujols', '1B'),
        ('285078', 'Hamilton', 'RF'),
        ('444432', 'Trumbo', 'DH'),
        ('435062', 'Kendrick, H', '2B'),
        ('430593', 'Harris, B', 'SS-3B'),
        ('455104', 'Iannetta', 'C'),
        ('499864', 'Jimenez, L', '3B'),
    ]
    for side, lineup in (('home', home_lineup), ('away', away_lineup)):
        for slot, (player_id, name, pos) in enumerate(lineup, start=1):
            prefix = '{0}_lineup_{1}'.format(side, slot)
            self.assertEqual(row[prefix + '_id'], player_id)
            self.assertEqual(row[prefix + '_name'], name)
            self.assertEqual(row[prefix + '_pos'], pos)
    # Serialized batter/pitcher lists round-trip through JSON with the
    # same counts as the box score collections.
    self.assertEqual(len(json.loads(row['home_batter'])), 21)
    self.assertEqual(len(json.loads(row['home_pitcher'])), 8)
    self.assertEqual(len(json.loads(row['away_batter'])), 19)
    self.assertEqual(len(json.loads(row['away_pitcher'])), 8)
def test_get_batter(self):
"""
Batter Profile
"""
soup = BeautifulSoup(TestBoxScore.XML_BOXSCORE, 'lxml')
boxscore = BoxScore._generate_object(soup, self.game, self.players)
home_batters = soup.find('batting', attrs={'team_flag': 'home'}).find_all('batter')
crisp = boxscore._get_batter(home_batters[0])
self.assertEqual(crisp.get('bo'), '1')
self.assertEqual(crisp.get('pos'), 'CF')
self.assertEqual(crisp.get('id'), '424825')
self.assertEqual(crisp.get('first'), 'Coco')
self.assertEqual(crisp.get('last'), 'Crisp')
self.assertEqual(crisp.get('box_name'), 'Crisp')
self.assertEqual(crisp.get('rl'), 'R')
self.assertEqual(crisp.get('bats'), 'S')
self.assertTrue(crisp.get('starting'))
away_batters = soup.find('batting', attrs={'team_flag': 'away'}).find_all('batter')
shuck = boxscore._get_batter(away_batters[1])
self.assertEqual(shuck.get('bo'), '1')
self.assertEqual(shuck.get('pos'), 'LF')
self.assertEqual(shuck.get('id'), '543776')
self.assertEqual(shuck.get('first'), 'J.B.')
self.assertEqual(shuck.get('last'), 'Shuck')
self.assertEqual(shuck.get('box_name'), 'Shuck')
self.assertEqual(shuck.get('rl'), 'L')
self.assertEqual(shuck.get('bats'), 'L')
self.assertFalse(shuck.get('starting'))
def test_get_pitcher(self):
"""
Pitcher Profile
"""
soup = BeautifulSoup(TestBoxScore.XML_BOXSCORE, 'lxml')
boxscore = BoxScore._generate_object(soup, self.game, self.players)
home_pitchers = soup.find('pitching', attrs={'team_flag': 'home'}).find_all('pitcher')
straily = boxscore._get_pitcher(home_pitchers[0])
self.assertEqual(straily.get('pos'), 'P')
self.assertEqual(straily.get('id'), '573185')
self.assertEqual(straily.get('first'), 'Dan')
self.assertEqual(straily.get('last'), 'Straily')
self.assertEqual(straily.get('box_name'), 'Straily')
self.assertEqual(straily.get('rl'), 'R')
self.assertEqual(straily.get('bats'), 'R')
self.assertEqual(straily.get('out'), '14')
self.assertEqual(straily.get('bf'), '23')
away_pitchers = soup.find('pitching', attrs={'team_flag': 'away'}).find_all('pitcher')
enright = boxscore._get_pitcher(away_pitchers[7])
self.assertEqual(enright.get('pos'), 'P')
self.assertEqual(enright.get('id'), '446264')
self.assertEqual(enright.get('first'), 'Barry')
self.assertEqual(enright.get('last'), 'Enright')
self.assertEqual(enright.get('box_name'), 'Enright')
self.assertEqual(enright.get('rl'), 'R')
self.assertEqual(enright.get('bats'), 'R')
self.assertEqual(enright.get('out'), '5')
self.assertEqual(enright.get('bf'), '7')
def test_get_batting_order_starting_flg(self):
"""
Batting order number & starting flag
"""
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': '100'})
self.assertEqual(bo, '1')
self.assertTrue(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': '201'})
self.assertEqual(bo, '2')
self.assertFalse(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': '310'})
self.assertEqual(bo, '3')
self.assertFalse(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': '400'})
self.assertEqual(bo, '4')
self.assertTrue(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': '5000'})
self.assertEqual(bo, False)
self.assertFalse(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': None})
self.assertEqual(bo, False)
self.assertFalse(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': '700'})
self.assertEqual(bo, '7')
self.assertTrue(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({'bo': 'U'})
self.assertEqual(bo, False)
self.assertFalse(starting)
bo, starting = BoxScore._get_batting_order_starting_flg({})
self.assertEqual(bo, False)
self.assertFalse(starting)
# Run the test suite when this module is executed as a script.
if __name__ == '__main__':
    main()
| Shinichi-Nakagawa/pitchpx | tests/pitchpx/game/test_boxscore.py | Python | mit | 78,596 |
"""
Twilio auth backend, docs at:
http://psa.matiasaguirre.net/docs/backends/twilio.html
"""
from re import sub
from social.p3 import urlencode
from social.backends.base import BaseAuth
class TwilioAuth(BaseAuth):
    """Auth backend for Twilio Connect applications."""
    name = 'twilio'
    ID_KEY = 'AccountSid'
    def get_user_details(self, response):
        """Return user details; Twilio only hands back the AccountSid."""
        # Example callback:
        # /complete/twilio/?AccountSid=ACc65ea16c9ebd4d4684edf814995b27e
        details = {field: '' for field in ('email', 'fullname', 'first_name', 'last_name')}
        details['username'] = response['AccountSid']
        return details
    def auth_url(self):
        """Return the authorization redirect url."""
        key, secret = self.get_key_and_secret()
        callback_url = self.strategy.absolute_uri(self.redirect_uri)
        # Downgrade the callback scheme to plain http — presumably a
        # Twilio requirement; TODO confirm before removing.
        callback_url = sub(r'^https', 'http', callback_url)
        query = urlencode({'cb': callback_url})
        return 'https://www.twilio.com/authorize/{0}?{1}'.format(key, query)
    def auth_complete(self, *args, **kwargs):
        """Complete the login process; return a user instance."""
        account_sid = self.data.get('AccountSid')
        if not account_sid:
            raise ValueError('No AccountSid returned')
        kwargs['response'] = self.data
        kwargs['backend'] = self
        return self.strategy.authenticate(*args, **kwargs)
| GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/social/backends/twilio.py | Python | agpl-3.0 | 1,384 |
#!/usr/bin/env python
# encoding: utf-8
import sys
import os
import redis
from relo.core.log import logger
dirname = os.path.dirname(os.path.abspath(__file__))
up_dir = os.path.dirname(dirname)
sys.path.append(up_dir)
from relo.core.interfaces import Backend
class REDISDB(Backend):
    """Redis-backed store for relo's file index (localhost:6379, db 12)."""
    name = "redis"
    expiretime = 60*60*24*7 # TTL applied to metadata hashes: one week, in seconds
    def init(self):
        """Open the connection to the local Redis server (database 12)."""
        logger.debug("Connecting to Redis")
        self.connection = redis.StrictRedis(host='localhost', port=6379, db=12)
    def check(self):
        """No-op: no consistency check is needed for Redis."""
        logger.debug("check not needed with redis")
    def load(self):
        """No-op: Redis loads its dataset by itself."""
        logger.debug("Redis auto loads")
    def save(self):
        """Synchronously dump the dataset to disk (Redis SAVE)."""
        self.connection.save()
    def addProject(self, key, project, type):
        """Record a project under *key*, packing name and type into one set member."""
        # ":::" is the field separator; listProjects() splits on it again.
        project_string = project + ":::" + type
        self.connection.sadd(key, project_string)
    def listProjects(self, key):
        """Return the projects stored under *key* as [name, type] lists."""
        members = self.connection.smembers(key)
        returnList = []
        for member in members:
            returnList.append(member.split(":::"))
        return returnList
    def addMeta(self, path, modified, hash, size, type):
        """Store file metadata as a hash at *path* and (re)set its TTL."""
        # Chained pipeline: HMSET + EXPIRE are issued in one round trip.
        pipe = self.connection.pipeline()
        pipe.hmset(path, dict(modified=modified, hash=hash, size=size, type=type)).expire(path, self.expiretime).execute()
        del pipe
    def addSet(self, key, value):
        """Add *value* to the set stored at *key*."""
        self.connection.sadd(key, value)
    def getSet(self, key):
        """Return all members of the set stored at *key*."""
        return self.connection.smembers(key)
    def get(self, key, field):
        """Return a single *field* of the hash stored at *key*."""
        return self.connection.hget(key, field)
    def find(self, key):
        """Return all keys whose name contains *key* (KEYS '*key*')."""
        return self.connection.keys(pattern='*'+key+'*')
    def end(self):
        """Shut down the Redis server itself."""
        self.connection.shutdown()
"""
Based entirely on Django's own ``setup.py``.
"""
import os
import sys
from distutils.command.install import INSTALL_SCHEMES
from distutils.command.install_data import install_data
try:
from setuptools import setup
except ImportError:
from distutils.core import setup # NOQA
try:
    from setuptools.command.test import test as TestCommand
    class PyTest(TestCommand):
        """``python setup.py test`` command that delegates to py.test."""
        user_options = [('pytest-args=', 'a', "Arguments to pass into py.test")]
        def initialize_options(self):
            TestCommand.initialize_options(self)
            self.pytest_args = []
        def finalize_options(self):
            TestCommand.finalize_options(self)
            self.test_args = []
            self.test_suite = True
        def run_tests(self):
            # Imported here because pytest may be absent at install time.
            import pytest
            errno = pytest.main(self.pytest_args)
            sys.exit(errno)
except ImportError:
    # setuptools' test command is unavailable; the `test` command is
    # simply not registered in that case (see cmdclasses below).
    PyTest = None
class osx_install_data(install_data):
    """install_data variant that follows install_lib on MacOS."""
    # On MacOS, the platform-specific lib dir is at:
    #   /System/Library/Framework/Python/.../
    # which is wrong. Python 2.5 supplied with MacOS 10.5 has an Apple-specific
    # fix for this in distutils.command.install_data#306. It fixes install_lib
    # but not install_data, which is why we roll our own install_data class.
    def finalize_options(self):
        # By the time finalize_options is called, install.install_lib is set to
        # the fixed directory, so we set the installdir to install_lib. The
        # install_data class uses ('install_data', 'install_dir') instead.
        self.set_undefined_options('install', ('install_lib', 'install_dir'))
        install_data.finalize_options(self)
if sys.platform == "darwin":
cmdclasses = {'install_data': osx_install_data}
else:
cmdclasses = {'install_data': install_data}
if PyTest:
cmdclasses['test'] = PyTest
def fullsplit(path, result=None):
    """
    Split a pathname into components (the opposite of os.path.join) in a
    platform-neutral way.
    """
    parts = [] if result is None else result
    while True:
        head, tail = os.path.split(path)
        if head == '':
            return [tail] + parts
        if head == path:
            # Unsplittable root (e.g. '/'): it is not included.
            return parts
        parts = [tail] + parts
        path = head
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
    scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, package_data = [], {}
root_dir = os.path.dirname(__file__)
if root_dir != '':
    os.chdir(root_dir)
extensions_dir = 'django_extensions'
for dirpath, dirnames, filenames in os.walk(extensions_dir):
    # Ignore PEP 3147 cache dirs and those whose names start with '.'
    dirnames[:] = [d for d in dirnames if not d.startswith('.') and d != '__pycache__']
    parts = fullsplit(dirpath)
    package_name = '.'.join(parts)
    if '__init__.py' in filenames:
        # A real Python package: record its dotted name.
        packages.append(package_name)
    elif filenames:
        # A plain data directory: attach its files to the nearest
        # enclosing package, with paths relative to that package.
        relative_path = []
        while '.'.join(parts) not in packages:
            relative_path.append(parts.pop())
        relative_path.reverse()
        path = os.path.join(*relative_path)
        package_files = package_data.setdefault('.'.join(parts), [])
        package_files.extend([os.path.join(path, f) for f in filenames])
# Import the package solely to read its version string; keep it in sync
# with django_extensions/__init__.py.
version = __import__('django_extensions').__version__
setup(
    name='django-extensions',
    version=version,
    description="Extensions for Django",
    long_description="""django-extensions bundles several useful
additions for Django projects. See the project page for more information:
http://github.com/django-extensions/django-extensions""",
    author='Michael Trier',
    author_email='mtrier@gmail.com',
    maintainer='Bas van Oostveen',
    maintainer_email='v.oostveen@gmail.com',
    url='http://github.com/django-extensions/django-extensions',
    license='MIT License',
    platforms=['any'],
    packages=packages,
    cmdclass=cmdclasses,
    package_data=package_data,
    install_requires=['six>=1.2'],
    tests_require=['Django', 'shortuuid', 'python-dateutil', 'pytest', 'tox', 'mock'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Utilities',
    ],
)
| jpadilla/django-extensions | setup.py | Python | mit | 4,834 |
"""Test CoolProp interaction."""
import pytest
import pygaps
import pygaps.utilities.exceptions as pgEx
@pytest.mark.utilities
class TestCoolProp():
    """Test CoolProp interaction."""
    def test_backend_change(self):
        """The thermodynamic backend can be switched and switched back."""
        original_backend = pygaps.thermodynamic_backend()
        pygaps.backend_use_refprop()
        assert pygaps.thermodynamic_backend() != original_backend
        pygaps.backend_use_coolprop()
        assert pygaps.thermodynamic_backend() == original_backend
    def test_backend_names_coolprop(self):
        """CoolProp can be called for every database adsorbate."""
        for adsorbate in pygaps.ADSORBATE_LIST:
            try:
                adsorbate.backend.molar_mass()
            except pgEx.ParameterError:
                pass
    def test_backend_names_refprop(self):
        """REFPROP can be called for every database adsorbate (if installed)."""
        import CoolProp
        version = CoolProp.CoolProp.get_global_param_string("REFPROP_version")
        if version != 'n/a':
            pygaps.backend_use_refprop()
            for adsorbate in pygaps.ADSORBATE_LIST:
                try:
                    adsorbate.backend.molar_mass()
                except (pgEx.ParameterError, ValueError):
                    pass
            pygaps.backend_use_coolprop()
| pauliacomi/pyGAPS | tests/utilities/test_coolprop_interaction.py | Python | mit | 1,431 |
# slightly modified from https://gist.github.com/udibr/67be473cf053d8c38730
# variation to https://github.com/ryankiros/skip-thoughts/blob/master/decoding/search.py
import numpy as np
def beamsearch(predict, end, k=1, maxsample=400):
    """Beam search decoder.

    Returns up to `k` samples (beams) and their scores. Each sample is a
    sequence of labels that either ends with the `end` label or is
    truncated to `maxsample` labels. (The original docstring documented a
    nonexistent `use_unk` parameter and an initial `empty` label; samples
    actually start empty.)

    Args:
        predict: callable receiving the list of live samples (each a list
            of labels) and returning either an array of label
            probabilities of shape (n_live, voc_size), or a tuple
            (probabilities, labels) where `labels` maps each probability
            column to its actual label value.
        end: label value that terminates a sample.
        k: beam width, i.e. the maximum number of finished samples.
        maxsample: maximum sample length before truncation.

    Returns:
        (samples, scores): finished samples carry their negative
        log-likelihood normalised by length; any still-live samples are
        appended with unnormalised scores.
    """
    dead_k = 0  # number of samples that reached eos / length limit
    dead_samples = []
    dead_scores = []
    live_k = 1  # number of samples still being extended
    live_samples = [[]]
    live_scores = [0]
    while live_k and dead_k < k:
        # For every live sample, get the probability of every label.
        probs = predict(live_samples)
        # `predict` may return (probs, labels); otherwise the label of
        # column j is simply j.
        vals = probs[1] if isinstance(probs, tuple) else np.indices(probs.shape)[-1]
        probs = probs[0] if isinstance(probs, tuple) else probs
        # Total score for every candidate is the sum of -log label probs.
        cand_scores = np.array(live_scores)[:, None] - np.log(probs)
        cand_flat = cand_scores.flatten()
        # Keep the (k - dead_k) best (lowest-score) candidates overall.
        ranks_flat = cand_flat.argsort()[:(k - dead_k)]
        live_scores = cand_flat[ranks_flat]
        # Append each chosen label to the sample it came from.
        voc_size = probs.shape[1]
        live_samples = [live_samples[r // voc_size] + [vals[r // voc_size][r % voc_size]] for r in ranks_flat]
        # Samples that just finished (end label or length limit)...
        zombie = [s[-1] == end or len(s) >= maxsample for s in live_samples]
        # ...move to the dead set, with length-normalised scores.
        dead_samples += [s for s, z in zip(live_samples, zombie) if z]
        dead_scores += [s / len(l) for s, l, z in zip(live_scores, live_samples, zombie) if z]
        dead_k = len(dead_samples)
        # Everything else stays live for the next step.
        live_samples = [s for s, z in zip(live_samples, zombie) if not z]
        live_scores = [s for s, z in zip(live_scores, zombie) if not z]
        live_k = len(live_samples)
    return dead_samples + live_samples, dead_scores + live_scores
| milankinen/c2w2c | src/textgen/search.py | Python | mit | 2,223 |
# -*- coding: utf-8 -*-
from resources.lib.parser import cParser
from resources.lib.handler.requestHandler import cRequestHandler
import re
import urlresolver
class cHosterHandler:
    """Resolves a hoster page to a direct media link."""
    def getUrl(self, oHoster):
        """Fetch the hoster page and extract the first media link.

        Returns (True, url) on the first pattern match, (False, '')
        otherwise.
        """
        sUrl = oHoster.getUrl()
        if oHoster.checkUrl(sUrl):
            oRequest = cRequestHandler(sUrl)
            sContent = oRequest.request()
            pattern = oHoster.getPattern()
            # getPattern() may return a single pattern or a list of them;
            # normalize to a list and try each in order.
            if type(pattern) == type(''):
                pattern = [pattern]
            for sPattern in pattern:
                aMediaLink = cParser().parse(sContent, sPattern)
                if aMediaLink[0] == True:
                    # BUGFIX: the original called an undefined `logger`
                    # here, raising NameError on every successful match.
                    return True, aMediaLink[1][0]
        return False, ''
    def getHoster2(self, sHoster):
        """Alias of getHoster(); kept for backwards compatibility."""
        return self.getHoster(sHoster)
    def getHoster(self, sHosterFileName):
        """Return the urlresolver host name for a url or hoster name.

        Returns False when no resolver plugin can handle the input.
        """
        if sHosterFileName != '':
            # First try interpreting the input as a full media url.
            source = [urlresolver.HostedMediaFile(url=sHosterFileName)]
            if (urlresolver.filter_source_list(source)):
                return source[0].get_host()
            # Then try it as a bare host name; media_id is only a dummy.
            source = [urlresolver.HostedMediaFile(host=sHosterFileName, media_id='ABC123XYZ')]
            if (urlresolver.filter_source_list(source)):
                return source[0].get_host()
        return False
try:
    from setuptools import setup
except ImportError:
    # setuptools unavailable: fall back to plain distutils.
    from distutils.core import setup
# Packaging metadata for the lkd.to API wrapper.
setup(
    name='lkd',
    version='2',
    packages=['lkd', 'tests'],
    author='Karan Goel',
    author_email='karan@goel.im',
    maintainer='Karan Goel',
    maintainer_email='karan@goel.im',
    url='http://www.goel.im/',
    license='MIT License',
    long_description='Python wrapper for lkd.to API.',
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Topic :: Internet',
        'Topic :: Internet :: WWW/HTTP :: Browsers',
        'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Utilities',
    ],
)
| karan/py-lkd.to | setup.py | Python | mit | 1,225 |
import logging
import os
import re
from glob import glob
from toolbox.config.docker import CONFIG_KEYS, CRP_CONFIG_ITEM_KEYS, CAPABILITIES
from toolbox.utils import check_common_files, counted_error, validate_bool, validate_flag, validate_ports
from .utils import sorted_container_configs, yield_dockerfiles
def check_config(config: dict): # pylint: disable=too-many-branches
    """Validate the challenge's config.yml for the docker CRP.

    Every problem is reported through counted_error() (which accumulates
    failures) instead of raising, so a single run collects all issues.
    """
    invalid_keys = set(config.keys()) - set(CONFIG_KEYS)
    if invalid_keys:
        counted_error('Invalid key(s) found in config.yml: %s', invalid_keys)
    # Only the first container in sort order [solvable, controller, ...]
    # may define shared volumes; `first` tracks that position.
    first = True
    for _, item in sorted_container_configs(config['crp_config']):
        if not isinstance(item, dict):
            counted_error('Items of crp_config must be dictionaries.')
        invalid_keys = set(item.keys()) - set(CRP_CONFIG_ITEM_KEYS)
        if invalid_keys:
            counted_error('Invalid key(s) found in crp_config: %s', invalid_keys)
        if 'capabilities' in item:
            invalid_caps = set(item['capabilities']) - CAPABILITIES
            if invalid_caps:
                counted_error('Forbidden capabilities: %s\n\tAllowed capabilities: %s',
                              invalid_caps, CAPABILITIES)
        if 'mem_limit_mb' in item:
            if not str(item['mem_limit_mb']).isnumeric() or not 8 <= int(item['mem_limit_mb']) <= 4096:
                # BUGFIX: the message previously said "between 8 and 2048"
                # while the check accepts up to 4096.
                counted_error('Invalid mem_limit_mb: %s. It should be an integer between 8 and 4096 MegaBytes.',
                              item['mem_limit_mb'])
        if 'cpu_limit_ms' in item:
            if not str(item['cpu_limit_ms']).isnumeric() or not 100 <= int(item['cpu_limit_ms']) <= 4000:
                counted_error('Invalid cpu_limit_ms: %s. It should be an integer between 100 and 4000 CPU milliseconds.',
                              item['cpu_limit_ms'])
        if 'volumes' in item:
            if not first:
                counted_error('Only the first container [solvable, controller, ...] can set shared volumes.')
            else:
                logging.warning('Shared volumes are manually set. Is this what you want?')
        validate_bool('read_only', item.get('read_only', '0'))
        validate_ports(item.get('ports', []), item.get('buttons', None))
        first = False
    validate_flag(config)
def check_dockerfile(filename):
    """Warn unless the Dockerfile is based on an official avatao image."""
    base_image_re = re.compile(r'FROM ((docker\.io\/)?avatao|eu\.gcr\.io\/avatao-challengestore)\/')
    try:
        with open(filename, 'r') as dockerfile:
            contents = dockerfile.read()
        if base_image_re.search(contents) is None:
            counted_error('Please, use avatao base images for your challenges. Our base images '
                          'are available at https://hub.docker.com/u/avatao/')
    except FileNotFoundError as err:
        counted_error('Could not open %s', err.filename)
    except Exception as err:
        counted_error('An error occurred while loading %s. \n\tDetails: %s', filename, err)
def check_misc():
    """Run repository-level checks shared across challenge types."""
    check_common_files()
    # Missing sources only warn: some challenges legitimately ship none.
    if not glob('src/*'):
        logging.warning('Missing or empty "src" directory. Please, place your source files there '
                        'if your challenge has any.')
def run(repo_path: str, repo_name: str, repo_branch: str, config: dict):
    """Entry point: validate the config, common files and every Dockerfile."""
    os.chdir(repo_path)
    check_config(config)
    check_misc()
    for dockerfile, _ in yield_dockerfiles(repo_path, repo_branch, repo_name, config['crp_config']):
        check_dockerfile(dockerfile)
| avatao-content/challenge-toolbox | toolbox/docker/check.py | Python | apache-2.0 | 3,414 |
"""EB for the unit level model.
This module implements the basic EB unit level model. The functionalities are organized in
classes. Each class has three main methods: *fit()*, *predict()* and *bootstrap_mse()*.
Linear Mixed Models (LMM) are the core underlying statistical framework used to model the hierarchical nature of the small area estimation (SAE) techniques implemented in this module, see McCulloch, C.E. and Searle, S.R. (2001) [#ms2001]_ for more details on LMM.
The *EbUnitModel* class implements the model developed by Molina, I. and Rao, J.N.K. (2010)
[#mr2010]_. So far, only the basic approach requiring the normal distribution of the errors is
implemented. This approach allows estimating complex indicators such as poverty indices and
other nonlinear paramaters. The class fits the model parameters using REML or ML. To predict the
area level indicators estimates, a Monte Carlo (MC) approach is used. MSE estimation is achieved
using a bootstrap procedure.
For a comprehensive review of the small area estimation models and its applications,
see Rao, J.N.K. and Molina, I. (2015) [#rm2015]_.
.. [#ms2001] McCulloch, C.E.and Searle, S.R. (2001), *Generalized, Linear, Mixed Models*,
New York: John Wiley & Sons, Inc.
.. [#mr2010] Molina, , I. and Rao, J.N.K. (2010), Small Area Estimation of Poverty Indicators,
*Canadian Journal of Statistics*, **38**, 369-385.
.. [#rm2015] Rao, J.N.K. and Molina, I. (2015), *Small area estimation, 2nd edn.*,
John Wiley & Sons, Hoboken, New Jersey.
"""
from __future__ import annotations
import warnings
from typing import Any, Callable, Optional, Union
import numpy as np
import pandas as pd
import statsmodels.api as sm
from samplics.sae.eblup_unit_model import EblupUnitModel
from samplics.utils import basic_functions, formats
from samplics.utils.types import Array, DictStrNum, Number
class EbUnitModel:
"""*EbUnitModel* implements the basic Unit level model for complex indicators.
*EbUnitModel* takes the sample data as input and fits the basic linear mixed model.
The user can pick between restricted maximum likelihood (REML) or maximum likelihood (ML)
to fit the model parameters. Also, EbUnitModel predicts the areas means and provides
the point and mean squared error (MSE) estimates of the empirical Bayes linear
unbiased (EBLUP). User can also obtain the bootstrap mse estimates of the MSE.
*EbUnitModel* requires the user to provide the indicator function. The indicator function is
expected to take the array of output sample observations as input and possibly some additional
parameters needed to compute the indicator. The indicator function outputs an aggregated value.
For example, the poverty gap indicator can have the following signature
pov_gap(y: array, pov_line: float) -> float. If the indicator function different outputs by
area then the self.area_list can be used to incorporate different logics across areas.
Also, *EbUnitModel* can use Boxcox to transform the output sample values in order to reduce
    the asymmetry in the data when fitting the linear mixed model.
Setting attributes
| method (str): the fitting method of the model parameters which can take the possible
| values restricted maximum likelihood (REML) or maximum likelihood (ML).
| If not specified, "REML" is used as default.
| indicator (function): a user defined function to compute the indicator.
| boxcox (dict): contains the *lambda* parameter of the Boxcox and a constant for the
| log-transformation of the Boxcox.
Sample related attributes
| ys (array): the output sample observations.
| Xs (ndarray): the auxiliary information.
| scales (array): an array of scaling parameters for the unit levels errors.
| afactors (array): sum of the inverse squared of scale.
| areas (array): the full vector of small areas from the sampled observations.
| areas_list (array): the list of small areas from the sample data.
| samp_size (dict): the sample size per small areas from the sample.
| ys_mean (array): sample area means of the output variable.
| Xs_mean (ndarray): sample area means of the auxiliary variables.
Model fitting attributes
| fitted (boolean): indicates whether the model has been fitted or not.
| fixed_effects (array): the estimated fixed effects of the regression model.
| fe_std (array): the estimated standard errors of the fixed effects.
| random_effects (array): the estimated area level random effects.
| associated with the small areas.
| re_std (number): the estimated standard error of the random effects.
| error_std (number): standard error of the unit level residuals.
| convergence (dict): a dictionnary holding the convergence status and the number of
| iterations from the model fitting algorithm.
| goodness (dict): a dictionary holding the log-likelihood, AIC, and BIC.
| gamma (dict): ratio of the between-area variability (re_std**2) to the total
| variability (re_std**2 + error_std**2 / a_factor).
Prediction related attributes
| areap (array): the list of areas for the prediction.
| number_reps (int): number of replicates for the bootstrap MSE estimation.
| area_est (array): area level EBLUP estimates.
| area_mse (array): area level taylor estimation of the MSE.
| area_mse_boot (array): area level bootstrap estimation of the MSE.
Main methods
| fit(): fits the linear mixed model to estimate the model parameters using REMl or ML
| methods.
| predict(): predicts the area level indicator estimates which includes both the point
| estimates and the taylor MSE estimate.
| bootstrap_mse(): computes the area level bootstrap MSE estimates of the indicator.
"""
    def __init__(
        self,
        method: str = "REML",
        boxcox: Optional[Number] = None,
        constant: Optional[Number] = None,
    ):
        """Initialize the EB unit level model.

        Args:
            method: fitting method, "REML" (default) or "ML"; case-insensitive.
            boxcox: *lambda* parameter of the Boxcox transformation.
            constant: additive constant used by the log-transformation
                (Boxcox with lambda equal to 0).

        Raises:
            AssertionError: if method is neither "REML" nor "ML".
        """
        # Setting
        self.method: str = method.upper()
        if self.method not in ("REML", "ML"):
            raise AssertionError("Value provided for method is not valid!")
        # Annotation-only declarations below have no value yet; they are
        # populated by fit() and predict().
        self.indicator: Callable[..., Any]
        self.number_samples: int
        self.boxcox: dict[str, Optional[Number]] = {"lambda": boxcox, "constant": constant}
        # Sample data
        self.scales: np.ndarray
        self.afactors: DictStrNum
        self.ys: np.ndarray
        self.Xs: np.ndarray
        self.areas: np.ndarray
        self.areas_list: np.ndarray
        self.samp_size: DictStrNum
        self.ys_mean: np.ndarray
        self.Xs_mean: np.ndarray
        # Fitted data
        self.fitted: bool = False
        self.fixed_effects: np.ndarray
        self.fe_std: np.ndarray
        self.random_effects: np.ndarray
        self.re_std: float
        self.error_std: float
        self.convergence: dict[str, Union[float, int, bool]] = {}
        self.goodness: dict[str, Number] = {}  # log-likelihood, deviance, AIC, BIC
        self.gamma: DictStrNum
        # Predict(ion/ed) data
        self.number_reps: int
        self.area_est: DictStrNum
        self.area_mse: DictStrNum
        self.area_mse_boot: Optional[DictStrNum] = None
    def _transformation(self, y: np.ndarray, inverse: bool) -> np.ndarray:
        """Apply (inverse=False) or invert (inverse=True) the configured Boxcox.

        NOTE(review): not called from any method visible here — fit() and
        _predict_indicator() use basic_functions.transform() instead. Also,
        for lambda != 0 the forward branch computes y**lambda / lambda
        while the inverse branch computes (1 + lambda*y)**(1/lambda),
        which inverts the *standard* Boxcox (y**lambda - 1) / lambda; the
        two branches do not look like exact inverses — confirm before
        relying on this method.
        """
        if self.boxcox["lambda"] is None:
            # No transformation configured: identity.
            return y
        elif self.boxcox["lambda"] == 0.0 and self.boxcox["constant"] is not None:
            # Log transformation with an additive constant.
            if inverse:
                return np.asarray(np.exp(y) - self.boxcox["constant"])
            else:
                return np.asarray(np.log(y + self.boxcox["constant"]))
        elif self.boxcox["lambda"] != 0.0:
            if inverse:
                return np.asarray(
                    np.exp(np.log(1 + y * self.boxcox["lambda"]) / self.boxcox["lambda"])
                )
            else:
                return np.asarray(np.power(y, self.boxcox["lambda"]) / self.boxcox["lambda"])
        else:
            # lambda == 0 with no constant: unsupported combination.
            raise AssertionError
    def fit(
        self,
        ys: Array,
        Xs: Array,
        areas: Array,
        samp_weight: Optional[Array] = None,
        scales: Union[Array, Number] = 1,
        intercept: bool = True,
        tol: float = 1e-8,
        maxiter: int = 100,
    ) -> None:
        """Fits the linear mixed models to estimate the model parameters that is the fixed
        effects, the random effects standard error and the unit level residuals' standard error.
        In addition, the method provides statistics related to the model fitting e.g. convergence
        status, log-likelihood, AIC, BIC, and more.

        Args:
            ys (Array): An array of the output sample observations.
            Xs (Array): An multi-dimensional array of the sample auxiliary information.
            areas (Array): provides the area of the sampled observations.
            samp_weight (Optional[Array], optional): An array of the sample weights.
                Defaults to None.
            scales (Union[Array, Number], optional): the scale factor for the unit level errors.
                If a single number is provided, the same number will be applied to all observations. Defaults to 1.
            intercept (bool, optional): An boolean to indicate whether an intercept need to be
                added to Xs. Defaults to True
            tol (float, optional): tolerance used for convergence criteria. Defaults to 1e-8.
            maxiter (int, optional): maximum number of iterations for the fitting algorithm.
                Defaults to 100.
        """
        ys = formats.numpy_array(ys)
        # Fit the LMM on the Boxcox-transformed outcome by delegating to
        # the basic EBLUP unit level model.
        ys_transformed = basic_functions.transform(
            ys,
            llambda=self.boxcox["lambda"],
            constant=self.boxcox["constant"],
            inverse=False,
        )
        eblup_ul = EblupUnitModel(
            method=self.method,
        )
        eblup_ul.fit(
            ys_transformed, Xs, areas, samp_weight, scales, intercept, tol=tol, maxiter=maxiter
        )
        # Mirror the helper model's sample data and fitted parameters on
        # this object so predict() can run without keeping eblup_ul around.
        self.scales = eblup_ul.scales
        self.ys = eblup_ul.ys
        self.Xs = eblup_ul.Xs
        self.areas = eblup_ul.areas
        self.areas_list = eblup_ul.areas_list
        self.afactors = eblup_ul.afactors
        self.error_std = eblup_ul.error_std
        self.fixed_effects = eblup_ul.fixed_effects
        self.fe_std = eblup_ul.fe_std
        self.re_std = eblup_ul.re_std
        self.convergence = eblup_ul.convergence
        self.goodness = eblup_ul.goodness
        self.ys_mean = eblup_ul.ys_mean
        self.Xs_mean = eblup_ul.Xs_mean
        self.gamma = eblup_ul.gamma
        self.samp_size = eblup_ul.samp_size
        self.fitted = eblup_ul.fitted
    def _predict_indicator(
        self,
        number_samples: int,
        y_s: np.ndarray,
        X_s: np.ndarray,
        area_s: np.ndarray,
        X_r: np.ndarray,
        area_r: np.ndarray,
        arear_list: np.ndarray,
        fixed_effects: np.ndarray,
        gamma: np.ndarray,
        sigma2e: float,
        sigma2u: float,
        scale: np.ndarray,
        intercept: bool,
        max_array_length: int,
        indicator: Callable[..., Any],
        show_progress: bool,
        **kwargs: Any,
    ) -> np.ndarray:
        """Monte-Carlo approximation of the area level indicators.

        For each area in `arear_list`: draws `number_samples` replicates of
        the out-of-sample outcomes from the fitted conditional distribution,
        appends the observed sample outcomes, back-transforms the values,
        applies `indicator` to each replicate, and returns the MC average
        per area (one value per area, in `arear_list` order).
        """
        if intercept:
            # Prepend a column of ones to the area means of the auxiliaries.
            if self.Xs_mean.ndim == 1:
                n = self.Xs_mean.shape[0]
                Xs_mean = np.insert(self.Xs_mean.reshape(n, 1), 0, 1, axis=1)
            else:
                Xs_mean = np.insert(self.Xs_mean, 0, 1, axis=1)
        else:
            Xs_mean = self.Xs_mean
        nb_arear = len(arear_list)
        mu_r = X_r @ fixed_effects
        if show_progress:
            bar_length = min(50, nb_arear)
            steps = np.linspace(1, nb_arear - 1, bar_length).astype(int)
            print(f"Generating the {number_samples} replicates samples")
        k = 0
        # eta[s, i] holds the indicator value of replicate s for area i.
        eta = np.zeros((number_samples, nb_arear)) * np.nan
        for i, d in enumerate(arear_list):
            oos = area_r == d
            mu_dr = mu_r[oos]
            ss = self.areas_list == d
            ybar_d = self.ys_mean[ss]
            xbar_d = Xs_mean[ss]
            # Conditional-mean correction: gamma_d * (ybar_d - xbar_d @ beta).
            mu_bias_dr = self.gamma[d] * (ybar_d - xbar_d @ fixed_effects)
            scale_dr = scale[oos]
            N_dr = np.sum(oos)
            # Generate replicates in chunks so that at most roughly
            # max_array_length values are held in memory at a time.
            cycle_size = max(int(max_array_length // N_dr), 1)
            number_cycles = int(number_samples // cycle_size)
            last_cycle_size = number_samples % cycle_size
            y_dr = None
            for j in range(number_cycles + 1):
                if j == number_cycles:
                    # Final (possibly empty) chunk holds the remainder.
                    cycle_size = last_cycle_size
                # Area effect drawn with conditional variance
                # sigma2u * (1 - gamma_d); one draw per replicate.
                re_effects = np.random.normal(
                    scale=(sigma2u * (1 - self.gamma[d])) ** 0.5,
                    size=cycle_size,
                )
                errors = np.random.normal(
                    scale=scale_dr * (sigma2e ** 0.5), size=(cycle_size, N_dr)
                )
                y_dr_j = mu_dr[None, :] + mu_bias_dr + re_effects[:, None] + errors
                if j == 0:
                    y_dr = y_dr_j
                else:
                    y_dr = np.append(y_dr, y_dr_j, axis=0)
            if show_progress:
                if i in steps:
                    k += 1
                    print(
                        f"\r[%-{bar_length}s] %d%%"
                        % ("=" * (k + 1), (k + 1) * (100 / bar_length)),
                        end="",
                    )
            # Each replicate: simulated out-of-sample values followed by
            # the observed sample values for the area.
            y_d = np.append(y_dr, np.tile(y_s[area_s == d], [number_samples, 1]), axis=1)
            # Back-transform to the original scale before computing the
            # indicator.
            z_d = basic_functions.transform(
                y_d,
                llambda=self.boxcox["lambda"],
                constant=self.boxcox["constant"],
                inverse=True,
            )
            eta[:, i] = np.apply_along_axis(indicator, axis=1, arr=z_d, **kwargs)
        if show_progress:
            print("\n")
        # MC average over replicates, one value per area.
        return np.asarray(np.mean(eta, axis=0))
def predict(
    self,
    Xr: Array,
    arear: Array,
    indicator: Callable[..., Array],
    number_samples: int,
    scaler: Union[Array, Number] = 1,
    intercept: bool = True,
    max_array_length: int = int(100e6),
    show_progress: bool = True,
    **kwargs: Any,
) -> None:
    """Predicts the area level indicators and stores them in ``self.area_est``.

    Args:
        Xr (Array): a multi-dimensional array of the out of sample auxiliary variables.
        arear (Array): provides the area of the out of sample units.
        indicator (Callable[..., Array]): a user defined function which computes the area
            level indicators. The function should take y (output variable) as the first
            parameter; additional parameters can be passed through **kwargs.
        number_samples (int): number of replicates for the Monte-Carlo (MC) algorithm.
        scaler (Union[Array, Number], optional): the scale factor for the unit level
            errors. If a single number is provided, the same number is applied to all
            observations. Defaults to 1.
        intercept (bool, optional): a boolean to indicate whether an intercept needs to
            be added to Xr. Defaults to True.
        max_array_length (int, optional): controls the number of replicates to generate
            at the same time. This parameter helps with performance; it can be reduced
            or increased based on the available RAM. Defaults to int(100e6).
        show_progress (bool, optional): shows a progress bar for the MC replicate
            calculations. Defaults to True.

    Raises:
        Exception: when predict() is called before fitting the model.
    """
    if not self.fitted:
        raise Exception(
            "The model must be fitted first with .fit() before running the prediction."
        )

    self.number_samples = int(number_samples)

    Xr = formats.numpy_array(Xr)
    arear = formats.numpy_array(arear)
    self.arear_list = np.unique(arear)
    if isinstance(scaler, (float, int)):
        scaler = np.asarray(np.ones(Xr.shape[0]) * scaler)
    else:
        scaler = formats.numpy_array(scaler)
    if intercept:
        if Xr.ndim == 1:
            # BUG FIX: self.Xs was previously reshaped with Xr's length
            # (n = Xr.shape[0]); the in-sample matrix generally has a different
            # number of rows than the out-of-sample one, so let each array be
            # reshaped by its own length.
            Xr = np.insert(Xr.reshape(-1, 1), 0, 1, axis=1)
            Xs = np.insert(self.Xs.reshape(-1, 1), 0, 1, axis=1)
        else:
            Xr = np.insert(Xr, 0, 1, axis=1)
            Xs = np.insert(self.Xs, 0, 1, axis=1)
    else:
        Xs = self.Xs

    area_est = self._predict_indicator(
        self.number_samples,
        self.ys,
        Xs,
        self.areas,
        Xr,
        arear,
        self.arear_list,
        self.fixed_effects,
        np.asarray(list(self.gamma.values())),
        self.error_std ** 2,
        self.re_std ** 2,
        scaler,
        intercept,
        max_array_length,
        indicator,
        show_progress,
        **kwargs,
    )

    self.area_est = dict(zip(self.arear_list, area_est))
def bootstrap_mse(
    self,
    Xr: Array,
    arear: Array,
    indicator: Callable[..., Array],
    number_reps: int,
    scaler: Union[Array, Number] = 1,
    intercept: bool = True,
    tol: float = 1e-6,
    maxiter: int = 100,
    max_array_length: int = int(100e6),
    show_progress: bool = True,
    **kwargs: Any,
) -> None:
    """Computes the MSE bootstrap estimates of the area level indicator estimates.

    The result is stored in ``self.area_mse_boot`` as a dict keyed by area.

    Args:
        Xr (Array): a multi-dimensional array of the out of sample auxiliary variables.
        arear (Array): provides the area of the out of sample units.
        indicator (Callable[..., Array]): a user defined function which computes the area
            level indicators; it receives the (back-transformed) outcome as first input.
        number_reps (int): number of bootstrap replicate populations to generate.
        scaler (Union[Array, Number], optional): the scale factor for the unit level
            errors. A single number is broadcast to all out-of-sample units. Defaults to 1.
        intercept (bool, optional): whether an intercept needs to be added to Xr.
            Defaults to True.
        tol (float, optional): tolerance used for convergence criteria. Defaults to 1e-6.
        maxiter (int, optional): maximum number of iterations for the fitting algorithm.
            Defaults to 100.
        max_array_length (int, optional): controls how many replicates are generated at
            the same time (performance/RAM trade-off). Defaults to int(100e6).
        show_progress (bool, optional): shows a bar progress of the bootstrap replicates
            calculations. Defaults to True.
    """
    X_r = formats.numpy_array(Xr)
    area_r = formats.numpy_array(arear)
    arear_list = np.unique(area_r)
    if intercept:
        if X_r.ndim == 1:
            # BUG FIX: self.Xs was previously reshaped with X_r's length
            # (n = X_r.shape[0]); reshape each matrix by its own length instead.
            X_r = np.insert(X_r.reshape(-1, 1), 0, 1, axis=1)
            Xs = np.insert(self.Xs.reshape(-1, 1), 0, 1, axis=1)
        else:
            X_r = np.insert(X_r, 0, 1, axis=1)
            Xs = np.insert(self.Xs, 0, 1, axis=1)
    else:
        Xs = self.Xs
    if isinstance(scaler, (float, int)):
        scale_r = np.ones(X_r.shape[0]) * scaler
    else:
        scale_r = formats.numpy_array(scaler)

    # Areas present both out of sample and in the fitted sample.
    ps = np.isin(area_r, self.areas_list)
    areas_ps = np.unique(area_r[ps])
    nb_areas_ps = areas_ps.size

    # Stack out-of-sample units first, then the sampled units of the same areas.
    area_s = self.areas[np.isin(self.areas, arear_list)]
    area = np.append(area_r, area_s)
    scale_s = self.scales[np.isin(self.areas, arear_list)]
    scale = np.append(scale_r, scale_s)
    _, N_d = np.unique(area, return_counts=True)
    X_s = Xs[np.isin(self.areas, arear_list)]
    X = np.append(X_r, X_s, axis=0)

    # Per-area lookups, computed once before the replicate loops.
    aboot_factor = np.zeros(nb_areas_ps)
    indice_dict = {}
    area_dict = {}
    scale_dict = {}
    scale_s_dict = {}
    a_factor_dict = {}
    sample_size_dict = {}
    X_dict = {}
    X_s_dict = {}
    for i, d in enumerate(arear_list):
        area_ds = area_s == d
        indice_dict[d] = area == d
        area_dict[d] = area[indice_dict[d]]
        scale_dict[d] = scale[indice_dict[d]]
        a_factor_dict[d] = self.afactors[d] if d in self.areas_list else 0
        sample_size_dict[d] = self.samp_size[d] if d in self.areas_list else 0
        scale_s_dict[d] = scale_s[area_ds] if d in self.areas_list else 0
        X_dict[d] = X[indice_dict[d]]
        X_s_dict[d] = X_s[area_ds]

    # Generate replicates in cycles so the working arrays stay bounded by
    # max_array_length.
    cycle_size = max(int(max_array_length // sum(N_d)), 1)
    number_cycles = int(number_reps // cycle_size)
    last_cycle_size = number_reps % cycle_size
    number_cycles = number_cycles + 1 if last_cycle_size > 0 else number_cycles

    k = 0
    bar_length = min(50, number_cycles * nb_areas_ps)
    steps = np.linspace(1, number_cycles * nb_areas_ps - 1, bar_length).astype(int)

    eta_pop_boot = np.zeros((number_reps, nb_areas_ps))
    eta_samp_boot = np.zeros((number_reps, nb_areas_ps))
    y_samp_boot = np.zeros((number_reps, int(np.sum(list(sample_size_dict.values())))))
    print(f"Generating the {number_reps} bootstrap replicate populations")
    for b in range(number_cycles):
        start = b * cycle_size
        end = (b + 1) * cycle_size
        if b == number_cycles - 1:
            end = number_reps
            cycle_size = last_cycle_size
        yboot_s = None
        for i, d in enumerate(areas_ps):
            aboot_factor[i] = a_factor_dict[d]
            # Simulate area random effects and unit errors on the model scale.
            re_d = np.random.normal(
                scale=self.re_std * (1 - self.gamma[d]) ** 0.5,
                size=cycle_size,
            )
            err_d = np.random.normal(
                scale=self.error_std * scale_dict[d],
                size=(cycle_size, np.sum(indice_dict[d])),
            )
            yboot_d = (X_dict[d] @ self.fixed_effects)[None, :] + re_d[:, None] + err_d
            zboot_d = basic_functions.transform(
                yboot_d,
                llambda=self.boxcox["lambda"],
                constant=self.boxcox["constant"],
                inverse=True,
            )
            eta_pop_boot[start:end, i] = indicator(zboot_d, **kwargs)
            # The sampled units were appended last within each area; keep them
            # to refit the model on each bootstrap sample.
            if i == 0:
                yboot_s = yboot_d[:, -int(sample_size_dict[d]) :]
            else:
                yboot_s = np.append(yboot_s, yboot_d[:, -int(sample_size_dict[d]) :], axis=1)

            if show_progress:
                run_id = b * nb_areas_ps + i
                if run_id in steps:
                    k += 1
                    print(
                        f"\r[%-{bar_length}s] %d%%"
                        % ("=" * (k + 1), (k + 1) * (100 / bar_length)),
                        end="",
                    )
        y_samp_boot[start:end, :] = yboot_s

    if show_progress:
        print("\n")
        k = 0
        bar_length = min(50, number_reps)
        steps = np.linspace(1, number_reps, bar_length).astype(int)

    reml = True if self.method == "REML" else False
    # with warnings.catch_warnings():
    #     warnings.filterwarnings("ignore")
    #     beta_ols = sm.OLS(y_samp_boot[0, :], X_s).fit().params
    # resid_ols = y_samp_boot[0, :] - np.matmul(X_s, beta_ols)
    # re_ols = basic_functions.sumby(area_s, resid_ols) / basic_functions.sumby(
    #     area_s, np.ones(area_s.size)
    # )
    fit_kwargs = {
        "tol": tol,
        "gtol": tol,
        # "pgtol": tol,
        "maxiter": maxiter,
    }  # TODO: to improve in the future. Check: statsmodels.LikelihoodModel.fit()
    print(f"Fitting and predicting using each of the {number_reps} bootstrap populations")
    for b in range(number_reps):
        # Refit the mixed model on the b-th bootstrap sample, then re-predict
        # the indicator with the bootstrap parameter estimates.
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore")
            boot_model = sm.MixedLM(y_samp_boot[b, :], X_s, area_s)
            boot_fit = boot_model.fit(
                reml=reml,
                # start_params=np.append(beta_ols, np.std(re_ols) ** 2),
                full_output=True,
                **fit_kwargs,
            )
        gammaboot = float(boot_fit.cov_re) / (
            float(boot_fit.cov_re) + boot_fit.scale * (1 / aboot_factor)
        )
        eta_samp_boot[b, :] = self._predict_indicator(
            self.number_samples,
            y_samp_boot[b, :],
            X_s,
            area_s,
            X_r,
            area_r,
            np.unique(area_r),
            boot_fit.fe_params,
            gammaboot,
            boot_fit.scale,
            float(boot_fit.cov_re),
            scale_r,
            intercept,
            max_array_length,
            indicator,
            False,
            **kwargs,
        )
        if show_progress:
            if b in steps:
                k += 1
                print(
                    f"\r[%-{bar_length}s] %d%%"
                    % ("=" * (k + 1), (k + 1) * (100 / bar_length)),
                    end="",
                )
    print("\n")

    mse_boot = np.asarray(np.mean(np.power(eta_samp_boot - eta_pop_boot, 2), axis=0))
    self.area_mse_boot = dict(zip(self.arear_list, mse_boot))
def to_dataframe(
    self,
    col_names: list[str] = ["_area", "_estimate", "_mse_boot"],
) -> pd.DataFrame:
    """Returns a pandas dataframe from dictionaries with same keys and one value per key.

    Args:
        col_names (list, optional): list of strings to be used for the dataframe column
            names. Defaults to ["_area", "_estimate", "_mse_boot"].

    Raises:
        AssertionError: if no prediction has been run yet, or if col_names does not
            contain 2 or 3 values when no bootstrap MSE is available.

    Returns:
        [pd.DataFrame]: a pandas dataframe
    """
    # BUG FIX: work on a copy. The original code popped from col_names in
    # place, which mutated the shared default list (and any caller-supplied
    # list) across calls.
    col_names = list(col_names)
    ncols = len(col_names)

    if self.area_est is None:
        raise AssertionError("No prediction yet. Must predict the area level estimates.")
    elif self.area_mse_boot is None and ncols not in (2, 3):
        raise AssertionError("col_names must have 2 or 3 values")
    elif self.area_mse_boot is None and ncols == 3:
        col_names.pop()  # remove the last element same as .pop(-1)

    if self.area_mse_boot is None:
        area_df = formats.dict_to_dataframe(col_names, self.area_est)
    else:
        area_df = formats.dict_to_dataframe(col_names, self.area_est, self.area_mse_boot)

    return area_df
| survey-methods/samplics | src/samplics/sae/eb_unit_model.py | Python | mit | 26,920 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the Apache License.
import json
from azurelinuxagent.common.protocol.restapi import ExtensionStatus
from azurelinuxagent.ga.exthandlers import parse_ext_status
from tests.tools import *
class TestExtHandlers(AgentTestCase):
    def _parse(self, raw_status):
        """Build a fresh ExtensionStatus and populate it from raw JSON text."""
        status = ExtensionStatus(seq_no=0)
        parse_ext_status(status, json.loads(raw_status))
        return status

    def test_parse_extension_status00(self):
        """
        Parse a status report for a successful execution of an extension.
        """
        raw_status = '''[{
    "status": {
      "status": "success",
      "formattedMessage": {
        "lang": "en-US",
        "message": "Command is finished."
      },
      "operation": "Daemon",
      "code": "0",
      "name": "Microsoft.OSTCExtensions.CustomScriptForLinux"
    },
    "version": "1.0",
    "timestampUTC": "2018-04-20T21:20:24Z"
  }
]'''
        status = self._parse(raw_status)

        self.assertEqual('0', status.code)
        self.assertEqual(None, status.configurationAppliedTime)
        self.assertEqual('Command is finished.', status.message)
        self.assertEqual('Daemon', status.operation)
        self.assertEqual('success', status.status)
        self.assertEqual(0, status.sequenceNumber)
        self.assertEqual(0, len(status.substatusList))

    def test_parse_extension_status01(self):
        """
        Parse a status report for a failed execution of an extension.

        The extension returned a bad status/status of failed.
        The agent should handle this gracefully, and convert all unknown
        status/status values into an error.
        """
        raw_status = '''[{
    "status": {
      "status": "failed",
      "formattedMessage": {
        "lang": "en-US",
        "message": "Enable failed: Failed with error: commandToExecute is empty or invalid ..."
      },
      "operation": "Enable",
      "code": "0",
      "name": "Microsoft.OSTCExtensions.CustomScriptForLinux"
    },
    "version": "1.0",
    "timestampUTC": "2018-04-20T20:50:22Z"
  }]'''
        status = self._parse(raw_status)

        self.assertEqual('0', status.code)
        self.assertEqual(None, status.configurationAppliedTime)
        self.assertEqual('Enable failed: Failed with error: commandToExecute is empty or invalid ...', status.message)
        self.assertEqual('Enable', status.operation)
        self.assertEqual('error', status.status)
        self.assertEqual(0, status.sequenceNumber)
        self.assertEqual(0, len(status.substatusList))
| andyliuliming/WALinuxAgent | tests/ga/test_exthandlers.py | Python | apache-2.0 | 2,603 |
from core.himesis import Himesis, HimesisPreConditionPatternLHS
class HProperty2_connectedLHS(HimesisPreConditionPatternLHS):
    # Auto-generated AToM3/MoTif pre-condition pattern (LHS). The pattern
    # graph has 3 nodes: an EClass (label 1) connected to an
    # EStructuralFeature (label 3) through an "eStructuralFeatures"
    # association link (label 13). Do not hand-edit the string attributes:
    # they hold the generated per-attribute matching code.
    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HProperty2_connectedLHS.
        """
        # Flag this instance as compiled now
        self.is_compiled = True

        super(HProperty2_connectedLHS, self).__init__(name='HProperty2_connectedLHS', num_nodes=3, edges=[])

        # Add the edges
        # Node 1 (EClass) -> node 0 (link) -> node 2 (EStructuralFeature).
        self.add_edges([[1, 0], [0, 2]])

        # Set the graph attributes
        self["mm__"] = ['MT_pre__ECoreMM', 'MoTifRule']
        self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================

return True
"""
        self["name"] = """"""
        self["GUID__"] = 3819550979256244747

        # Set the node attributes
        # Node 0: the directed association link; matches only links whose
        # association type is "eStructuralFeatures".
        self.vs[0]["MT_subtypeMatching__"] = False
        self.vs[0]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================

return attr_value == "eStructuralFeatures"
"""
        self.vs[0]["MT_label__"] = """13"""
        self.vs[0]["mm__"] = """MT_pre__directLink_S"""
        self.vs[0]["MT_subtypes__"] = []
        self.vs[0]["MT_dirty__"] = False
        self.vs[0]["GUID__"] = 3022664974983803266

        # Node 1: the EClass node; all attribute constraints are trivially True.
        self.vs[1]["MT_subtypeMatching__"] = False
        self.vs[1]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================

return True
"""
        self.vs[1]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================

return True
"""
        self.vs[1]["MT_label__"] = """1"""
        self.vs[1]["mm__"] = """MT_pre__EClass"""
        self.vs[1]["MT_subtypes__"] = []
        self.vs[1]["MT_dirty__"] = False
        self.vs[1]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================

return True
"""
        self.vs[1]["GUID__"] = 7048466522854834351

        # Node 2: the EStructuralFeature node; subtype matching enabled so
        # e.g. EAttribute/EReference instances also match.
        self.vs[2]["MT_subtypeMatching__"] = True
        self.vs[2]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================

return True
"""
        self.vs[2]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================

return True
"""
        self.vs[2]["MT_label__"] = """3"""
        self.vs[2]["mm__"] = """MT_pre__EStructuralFeature"""
        self.vs[2]["MT_subtypes__"] = []
        self.vs[2]["MT_dirty__"] = False
        self.vs[2]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================

return True
"""
        self.vs[2]["GUID__"] = 6204361095848266049

    # The eval_* methods below are the compiled equivalents of the per-node
    # attribute constraint strings set in __init__ (one method per
    # attribute/label pair). Only eval_associationType13 restricts matching.
    def eval_associationType13(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================

        return attr_value == "eStructuralFeatures"

    def eval_classtype1(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================

        return True

    def eval_cardinality1(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================

        return True

    def eval_name1(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================

        return True

    def eval_classtype3(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================

        return True

    def eval_cardinality3(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================

        return True

    def eval_name3(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================

        return True

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returns the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the nodes in the LHS have been matched.
        # You can access a matched node labelled n by: PreNode('n').
        # To access attribute x of node n, use: PreNode('n')['x'].
        # The given constraint must evaluate to a boolean expression:
        # returning True enables the rule to be applied,
        # returning False forbids the rule from being applied.
        #===============================================================================

        return True
| levilucio/SyVOLT | ECore_Copier_MM/properties/positive/himesis/HProperty2_connectedLHS.py | Python | mit | 12,317 |
from zope.interface import implements
from twisted.internet import reactor
from twisted.internet import defer
from webut.skin import iskin
from ldaptor.protocols import pureldap
from ldaptor.protocols.ldap import ldapsyntax, distinguishedname
from ldaptor import generate_password, interfaces
from ldaptor.apps.webui.uriquote import uriUnquote
from ldaptor import weave
from ldaptor.apps.webui.i18n import _
from ldaptor.apps.webui import i18n
import os
from nevow import rend, inevow, loaders, url, tags
from formless import annotate, webform, iformless, configurable
def getEntry(ctx, dn):
    """Return an LDAPEntry for dn, bound to the logged-in user's LDAP client."""
    session = ctx.locate(inevow.ISession)
    user = session.getLoggedInRoot().loggedIn
    return ldapsyntax.LDAPEntry(client=user.client, dn=dn)
def getEntryWithAttributes(ctx, dn, *attributes):
    """Fetch the named attributes of dn; returns a Deferred firing with the entry."""
    entry = getEntry(ctx, dn)
    return entry.fetch(*attributes)
def getServiceName(ctx, dn):
    """Return a Deferred firing with the first cn value of the entry at dn.

    Fails with RuntimeError if the entry carries no cn attribute.
    """
    d = getEntryWithAttributes(ctx, dn, 'cn')
    def _cb(e):
        for cn in e.get('cn', []):
            return cn
        # Modernized from the Python-2-only `raise Exc, msg` statement form;
        # the parenthesized call form is valid on both Python 2 and 3.
        raise RuntimeError(
            _("Service password entry has no attribute cn: %r") % e)
    d.addCallback(_cb)
    return d
def checkPasswordTypos(newPassword, again):
    """Raise a form validation error unless both typed passwords match."""
    if newPassword == again:
        return
    raise annotate.ValidateError(
        {},
        formErrorMessage=_('Passwords do not match.'))
class RemoveServicePassword(configurable.Configurable):
    """Configurable exposing one action: delete the service password entry at dn."""

    def __init__(self, dn):
        super(RemoveServicePassword, self).__init__(None)
        self.dn = dn

    def getBindingNames(self, ctx):
        return ['remove']

    def bind_remove(self, ctx):
        # Describe the form: a single button, no user-supplied fields.
        method = annotate.Method(
            arguments=[
                annotate.Argument('ctx', annotate.Context()),
            ],
            label=_('Remove'))
        return annotate.MethodBinding('remove', method, action=_('Remove'))

    def remove(self, ctx):
        """Delete the entry; fires with a user-visible status message."""
        entry = getEntry(ctx, self.dn)
        d = getServiceName(ctx, self.dn)

        def _delete(name, e):
            # Keep the service name around so it can be reported after deletion.
            return e.delete().addCallback(lambda _ignored: name)

        d.addCallback(_delete, entry)
        d.addCallback(lambda name: _('Removed service %r') % name)
        return d
class SetServicePassword(configurable.Configurable):
    """Configurable exposing one action: set an explicit password on the entry at dn."""

    def __init__(self, dn):
        super(SetServicePassword, self).__init__(None)
        self.dn = dn

    def getBindingNames(self, ctx):
        return ['setServicePassword']

    def bind_setServicePassword(self, ctx):
        # Form with two password fields; both are required.
        method = annotate.Method(
            arguments=[
                annotate.Argument('ctx', annotate.Context()),
                annotate.Argument('newPassword', annotate.PasswordEntry(
                    required=True, label=_('New password'))),
                annotate.Argument('again', annotate.PasswordEntry(
                    required=True, label=_('Again'))),
            ],
            label=_('Set password'))
        return annotate.MethodBinding(
            'setServicePassword', method, action=_('Set password'))

    def _isPasswordAcceptable(self, ctx, newPassword, again):
        # Hook point for validation; currently only checks for typos.
        return checkPasswordTypos(newPassword, again)

    def setServicePassword(self, ctx, newPassword, again):
        """Validate the typed passwords, then store them on the service entry."""
        d = defer.maybeDeferred(
            self._isPasswordAcceptable, ctx, newPassword, again)

        def _store(_ignored):
            entry = getEntry(ctx, self.dn)
            return defer.maybeDeferred(entry.setPassword, newPasswd=newPassword)

        def _fetchName(_ignored):
            return getServiceName(ctx, self.dn)

        d.addCallback(_store)
        d.addCallback(_fetchName)
        d.addCallback(lambda name: _('Set password for service %r') % name)
        return d
class SetRandomServicePassword(configurable.Configurable):
    """Configurable exposing one action: set a freshly generated random password."""

    def __init__(self, dn):
        super(SetRandomServicePassword, self).__init__(None)
        self.dn = dn

    def getBindingNames(self, ctx):
        return ['generateRandom']

    def bind_generateRandom(self, ctx):
        # A single button; the password itself is generated server-side.
        method = annotate.Method(
            arguments=[
                annotate.Argument('ctx', annotate.Context()),
            ],
            label=_('Generate random'))
        return annotate.MethodBinding(
            'generateRandom', method, action=_('Generate random'))

    def generateRandom(self, ctx):
        """Generate one random password, store it, and report it to the user."""
        d = generate_password.generate(reactor)

        def _single(passwords):
            # generate() fires with a list; exactly one password was requested.
            assert len(passwords) == 1
            return passwords[0]

        def _apply(newPassword):
            entry = getEntry(ctx, self.dn)
            sub = entry.setPassword(newPassword)
            sub.addCallback(lambda _ignored: getServiceName(ctx, self.dn))
            sub.addCallback(
                lambda name: _('Service %r password set to %s') % (name, newPassword))
            return sub

        d.addCallback(_single)
        d.addCallback(_apply)
        return d
class AddService(configurable.Configurable):
    # Configurable exposing one action: create a new serviceSecurityObject
    # entry under self.dn, with a user-supplied password or (when the password
    # fields are left empty) a randomly generated one.

    def __init__(self, dn):
        super(AddService, self).__init__(None)
        # DN of the owner entry the new service password will belong to.
        self.dn = dn

    def getBindingNames(self, ctx):
        # Single formless binding: the 'add' method below.
        return ['add']

    def bind_add(self, ctx):
        # Describe the 'add' form: the service name is required; the password
        # fields are optional (empty means "generate a random password").
        return annotate.MethodBinding(
            'add',
            annotate.Method(arguments=[
            annotate.Argument('ctx', annotate.Context()),
            annotate.Argument('serviceName', annotate.String(required=True,
                                                             label=_('Service name'))),
            annotate.Argument('newPassword', annotate.PasswordEntry(required=False,
                                                                    label=_('New password'),
                                                                    description=_("Leave empty to generate random password."))),
            annotate.Argument('again', annotate.PasswordEntry(required=False,
                                                              label=_('Again'))),
            ],
                            label=_('Add')),
            action=_('Add'))

    def add(self, ctx, serviceName, newPassword, again):
        """Create the service entry, generating a password when none was typed.

        Raises a form validation error (via checkPasswordTypos) when the two
        password fields were filled in but do not match.
        """
        if newPassword or again:
            checkPasswordTypos(newPassword, again)

        if not newPassword:
            d = self._generate(ctx, serviceName)
        else:
            d = self._add(ctx, newPassword, serviceName)
        return d

    def _cbSetPassword(self, ctx, newPassword, serviceName):
        # Create the child entry cn=<serviceName>+owner=<dn> with a disabled
        # placeholder password ('{crypt}!'), then set the real password on it.
        e = getEntry(ctx, self.dn)
        rdn = distinguishedname.RelativeDistinguishedName(
            attributeTypesAndValues=[
            distinguishedname.LDAPAttributeTypeAndValue(
            attributeType='cn', value=serviceName),
            distinguishedname.LDAPAttributeTypeAndValue(
            attributeType='owner', value=str(self.dn))
            ])
        d = e.addChild(rdn, {
            'objectClass': ['serviceSecurityObject'],
            'cn': [serviceName],
            'owner': [str(self.dn)],
            'userPassword': ['{crypt}!'],
            })
        def _setPass(e, newPassword):
            d = e.setPassword(newPassword)
            return d
        d.addCallback(_setPass, newPassword)
        return d

    def _generate(self, ctx, serviceName):
        # Random-password path: generate one password, create the entry with
        # it, and echo the generated password back in the status message.
        d = generate_password.generate(reactor)
        def _first(passwords):
            # generate() fires with a list; exactly one password was requested.
            assert len(passwords)==1
            return passwords[0]
        d.addCallback(_first)
        def _cb(newPassword, serviceName):
            d = self._cbSetPassword(ctx, newPassword, serviceName)
            d.addCallback(lambda dummy: _('Added service %r with password %s') % (serviceName, newPassword))
            return d
        d.addCallback(_cb, serviceName)
        return d

    def _add(self, ctx, newPassword, serviceName):
        # Explicit-password path: create the entry; do not echo the password.
        d = self._cbSetPassword(ctx, newPassword, serviceName)
        def _report(dummy, name):
            return _('Added service %r') % name
        d.addCallback(_report, serviceName)
        return d
class ServicePasswordChangeMixin(object):
    """Mixin providing the per-service password management UI for an entry.

    Service actions are discovered by introspection: any class attribute named
    ``servicePasswordAction_<priority>_<name>`` is treated as an action class
    taking a DN, and actions are presented in ascending priority order.
    """

    def __init__(self, dn):
        super(ServicePasswordChangeMixin, self).__init__()
        self.dn = dn

    def listServicePasswordActions(self):
        """Yield action names from servicePasswordAction_* attributes, by priority."""
        l = [(int(pri), name)
             for x, pri, name in [name.split('_', 2) for name in dir(self)
                                  if name.startswith('servicePasswordAction_')]]
        l.sort()
        for pri, name in l:
            yield name

    def getServicePasswordAction(self, name):
        """Return the action class registered under ``name``, or None."""
        for attrName in dir(self):
            if not attrName.startswith('servicePasswordAction_'):
                continue
            x, pri, actionName = attrName.split('_', 2)
            if actionName == name:
                return getattr(self, attrName)
        return None

    def render_servicePasswords(self, ctx, data):
        # Load the service-passwords template from the package directory.
        docFactory = loaders.xmlfile(
            'change_service_passwords.xhtml',
            templateDir=os.path.split(os.path.abspath(__file__))[0])
        r = inevow.IQ(docFactory).onePattern('main')
        return r

    def render_hideIfNot(self, ctx, data):
        # Render the tag only when the data is truthy.
        if data:
            return ctx.tag
        else:
            return tags.invisible()

    def data_servicePasswords(self, ctx, data):
        # Find all serviceSecurityObject entries (with a cn) owned by this DN.
        user = ctx.locate(inevow.ISession).getLoggedInRoot().loggedIn
        config = interfaces.ILDAPConfig(ctx)

        e = ldapsyntax.LDAPEntry(client=user.client, dn=config.getBaseDN())
        d = e.search(filterObject=pureldap.LDAPFilter_and([
            pureldap.LDAPFilter_equalityMatch(attributeDesc=pureldap.LDAPAttributeDescription('objectClass'),
                                              assertionValue=pureldap.LDAPAssertionValue('serviceSecurityObject')),
            pureldap.LDAPFilter_equalityMatch(attributeDesc=pureldap.LDAPAttributeDescription('owner'),
                                              assertionValue=pureldap.LDAPAssertionValue(str(self.dn))),
            pureldap.LDAPFilter_present('cn'),
            ]),
                     attributes=['cn'])
        return d

    def render_form_service(self, ctx, data):
        # TODO error messages for one password change form display in
        # all of them.
        e = inevow.IData(ctx)
        for name in self.listServicePasswordActions():
            yield webform.renderForms('service_%s_%s' % (name, e.dn))[ctx.tag()]

    def locateConfigurable(self, ctx, name):
        """Resolve ``service_<action>_<dn>`` names to per-entry action configurables."""
        try:
            return super(ServicePasswordChangeMixin, self).locateConfigurable(ctx, name)
        except AttributeError:
            if name.startswith('service_'):
                pass
            else:
                raise

        rest = name[len('service_'):]
        l = rest.split('_', 1)
        if len(l) != 2:
            # BUG FIX: modernized from the Python-2-only statement form
            # `raise AttributeError, name` to the call form, which is valid
            # on both Python 2 and 3.
            raise AttributeError(name)
        c = self.getServicePasswordAction(l[0])
        if c is None:
            raise AttributeError(name)
        return iformless.IConfigurable(c(l[1]))

    render_zebra = weave.zebra()

    render_i18n = i18n.render()
class ConfirmChange(ServicePasswordChangeMixin, rend.Page):
    """Password-change page for a single LDAP entry (``self.dn``).

    Exposes two formless bindings: ``setPassword`` (new password typed
    twice) and ``generateRandom`` (server-generated random password),
    plus per-service password forms from ServicePasswordChangeMixin.
    """
    implements(iskin.ISkinnable)
    title = _('Ldaptor Password Change Page')
    addSlash = True
    # Template is loaded from the directory this module lives in.
    docFactory = loaders.xmlfile(
        'change_password.xhtml',
        templateDir=os.path.split(os.path.abspath(__file__))[0])
    def getBindingNames(self, ctx):
        # Formless method bindings rendered on this page.
        return ['setPassword', 'generateRandom']
    def bind_setPassword(self, ctx):
        # Form: new password entered twice, handled by setPassword().
        return annotate.MethodBinding(
            'setPassword',
            annotate.Method(arguments=[
            annotate.Argument('ctx', annotate.Context()),
            annotate.Argument('newPassword', annotate.PasswordEntry(required=True,
                                                                    label=_('New password'))),
            annotate.Argument('again', annotate.PasswordEntry(required=True,
                                                              label=_('Again'))),
            ],
                            label=_('Set password')),
            action=_('Set password'))
    def bind_generateRandom(self, ctx):
        # Form with no user-supplied input, handled by generateRandom().
        return annotate.MethodBinding(
            'generateRandom',
            annotate.Method(arguments=[
            annotate.Argument('ctx', annotate.Context()),
            ],
                            label=_('Generate random')),
            action=_('Generate random'))
    # Service password actions; the numeric infix fixes display order.
    servicePasswordAction_10_remove = RemoveServicePassword
    servicePasswordAction_20_set = SetServicePassword
    servicePasswordAction_30_random = SetRandomServicePassword
    def _setPassword(self, ctx, password):
        """Set *password* on the entry this page edits; returns a Deferred."""
        e = getEntry(ctx, self.dn)
        d=defer.maybeDeferred(e.setPassword, newPasswd=password)
        return d
    def setPassword(self, ctx, newPassword, again):
        """Form handler: check the two fields match, then set the password."""
        d = defer.maybeDeferred(checkPasswordTypos, newPassword, again)
        d.addCallback(lambda dummy: self._setPassword(ctx, newPassword))
        d.addCallback(lambda dummy: _('Password set.'))
        def eb(fail):
            # Turn any failure into a user-visible status string.
            return _("Failed: %s") % fail.getErrorMessage()
        d.addErrback(eb)
        return d
    def generateRandom(self, ctx):
        """Form handler: generate one random password, set it, report it."""
        d=generate_password.generate(reactor)
        def _first(passwords):
            # generate() returns a list; exactly one password was requested.
            assert len(passwords)==1
            return passwords[0]
        d.addCallback(_first)
        def _status(newPassword, ctx):
            d = self._setPassword(ctx, newPassword)
            # Echo the generated password back to the user.
            d.addCallback(lambda dummy: _('Password set to %s') % newPassword)
            return d
        d.addCallback(_status, ctx)
        def eb(fail):
            return _("Failed: %s") % fail.getErrorMessage()
        d.addErrback(eb)
        return d
    def data_status(self, ctx, data):
        # Status message from the previous form submission, if any.
        try:
            return ctx.locate(inevow.IStatusMessage)
        except KeyError:
            return ''
    def data_dn(self, ctx, data):
        return self.dn
    def render_form(self, ctx, data):
        return webform.renderForms()
    def render_passthrough(self, ctx, data):
        return ctx.tag.clear()[data]
    def data_header(self, ctx, data):
        # Navigation links relative to the application root (two levels up).
        u=url.URL.fromContext(ctx)
        u=u.parentdir().parentdir().clear()
        l=[]
        l.append(tags.a(href=u.sibling("search"))[_("Search")])
        l.append(tags.a(href=u.sibling("add"))[_("add new entry")])
        l.append(tags.a(href=u.sibling("edit").child(str(self.dn)))[_("edit")])
        l.append(tags.a(href=u.sibling("delete").child(str(self.dn)))[_("delete")])
        return l
    def render_add(self, ctx, data):
        return webform.renderForms('add')
    def configurable_add(self, ctx):
        return AddService(self.dn)
    render_i18n = i18n.render()
    def render_data(self, ctx, data):
        return ctx.tag.clear()[data]
class GetDN(rend.Page):
    """Resolve the DN segment of the URL to a ConfirmChange page.

    A bare request redirects to the logged-in user's own DN.
    """
    addSlash = True

    def child_(self, ctx):
        # No DN in the URL: redirect to the logged-in entry's DN child.
        entry = inevow.ISession(ctx).getLoggedInRoot().loggedIn
        u = inevow.IRequest(ctx).URLPath()
        return u.child(str(entry.dn))

    def childFactory(self, ctx, name):
        """Return a ConfirmChange page for the DN encoded in *name*.

        Returns None (404) when the segment is not a valid DN.
        """
        unquoted = uriUnquote(name)
        try:
            dn = distinguishedname.DistinguishedName(stringValue=unquoted)
        # Was `except ..., e:` — Python-2-only syntax, a SyntaxError on
        # Python 3; the binding was unused anyway.
        except distinguishedname.InvalidRelativeDistinguishedName:
            # TODO There's no way to throw a FormException at this stage.
            return None
        return ConfirmChange(dn=dn)
def getResource():
    """Module entry point: build the root resource for this page tree."""
    return GetDN()
| antong/ldaptor | ldaptor/apps/webui/change_password.py | Python | lgpl-2.1 | 15,552 |
import gtk.glade
import sys, os
import os.path
import locale, gettext
locale.setlocale (locale.LC_NUMERIC, '')
import string
import sys
import re
# Feature probe: prefer the original gtksourceview binding, fall back to
# gtksourceview2, and disable source-view features entirely (GtkSV False)
# when neither can be imported.
GtkSV = True
try:
    import gtksourceview
except:
    try:
        import gtksourceview2 as gtksourceview
    except:
        print 'No gtksourceview import possible. Please install gtksourceview2 for python!!'
        GtkSV = False
from cuon.Windows.windows import windows
import cuon.Misc.cuon_dialog
class editorwindow(windows):
    """Glade-built GTK text editor window for cuon.

    Uses a gtksourceview widget when available (module flag GtkSV),
    otherwise a plain gtk.TextView.  Files are described by a dict
    (``dicFilename``) with at least ``TYPE`` ('FILE' or 'SSH') and
    ``NAME``; SSH files are copied to/from the remote host with scp via
    a local temp file named after the tab index.
    """
    def __init__(self, dicFilename=None, servermod=False, prgmode = False):
        windows.__init__(self)
        self.close_dialog = None
        self.clipboard = gtk.clipboard_get()
        self.ModulNumber = 0
        print dicFilename, servermod
        # Load the current user object from the database.
        self.openDB()
        self.oUser = self.loadObject('User')
        self.closeDB()
        if servermod:
            # Server mode: load the UI straight from the installed glade
            # file, preferring GtkBuilder and falling back to libglade.
            try:
                self.xml = gtk.Builder()
                self.xml.add_from_file('../usr/share/cuon/glade/editor.glade2')
                self.xml.set_translation_domain('cuon')
            except:
                self.xml = gtk.glade.XML('../usr/share/cuon/glade/editor.glade2')
        else:
            if prgmode:
                self.loadGlade('prgeditor.xml')
            else:
                self.loadGlade('editor.xml')
        self.setXmlAutoconnect()
        self.win1 = self.getWidget('EditorMainwindow')
        if prgmode:
            pass
        else:
            if GtkSV:
                # Replace the plain viewport with a gtksourceview editor
                # that supports syntax highlighting and auto-indent.
                self.textbuffer, self.view = self.getNotesEditor(mime_type = 'text/x-ini-file')
                self.view.set_indent_on_tab(True)
                self.view.set_auto_indent(True)
                Vbox = self.getWidget('vbox1')
                Scrolledwindow = self.getWidget('scrolledwindow1')
                Scrolledwindow.remove(self.getWidget('viewport1'))
                #Vbox.remove(oldScrolledwindow)
                #Vbox.add(self.view)
                #Vbox.show_all()
                Scrolledwindow.add(self.view)
                self.view.show_all()
                Scrolledwindow.show_all()
            else:
                self.textbuffer = self.getWidget('tv1').get_buffer()
        self.actualTab = 0
        if dicFilename:
            self.dicCurrentFilename = dicFilename
            self.open_file(dicFilename)
        else:
            # No file given: start with an unnamed local file.
            self.dicCurrentFilename = {'TYPE':'FILE','NAME':'./new.txt'}
    def on_quit1_activate(self, event):
        print 'quit editor'
        # Remove any scp temp files left over from editing SSH files.
        os.system('find -name "tmp_editor_ssh_tab_*" -exec rm {} \;')
        self.closeWindow()
    def on_save1_activate(self, event):
        self.save_current_file()
    def get_text(self):
        "Returns the current text in the text area"
        return self.textbuffer.get_text(self.textbuffer.get_start_iter(), self.textbuffer.get_end_iter())
    def delete(self, widget, data=None):
        "Put up dialog box, asking user if they're sure they want to quit. If so, quit"
        # NOTE(review): this method references self.textview, self.window and
        # Ged, none of which are defined anywhere in this class — it looks
        # copied from a sample editor and would raise NameError/AttributeError
        # if ever invoked.  Verify before wiring it to a signal.
        #Since we allow people to do other things while deciding whether to save
        #we'd better not let them accumulate these dialog boxes
        if self.close_dialog:
            return True
        if self.textview.get_buffer().get_modified():
            self.close_dialog=gtk.Dialog("Save changes to file before closing?",
                                         None,0,
                                         ("Close without saving",0,
                                          gtk.STOCK_CANCEL,1,
                                          gtk.STOCK_SAVE,2))
            answer=self.close_dialog.run()
            if answer==1:
                self.close_dialog.destroy()
                self.close_dialog=None
                return True
            if answer==2:
                self.save_current_file()
        Ged.number_of_instances = Ged.number_of_instances - 1
        self.window.destroy()
        if __name__ == "__main__":
            if Ged.number_of_instances == 0:
                gtk.main_quit()
        return False
    def open_item_clicked(self, data=None):
        "Creates a file chooser and allows user to open a file with it"
        self.filesel = gtk.FileSelection("Select file to open...")
        self.filesel.ok_button.connect_object("clicked", self.open_selected_file, None)
        self.filesel.cancel_button.connect("clicked", lambda w: self.filesel.destroy())
        self.filesel.show()
    def open_selected_file(self, data=None):
        "Opens the selected file and reads it in"
        self.open_file(self.filesel.get_filename())
        #self.filesel.destroy()
    def open_file(self, dicFilename):
        "Opens the file given in filename and reads it in"
        if dicFilename['TYPE'] == 'SSH':
            # Remote file: fetch it with scp into a per-tab temp file first.
            dicFilename['TMPNAME'] = 'tmp_editor_ssh_tab_' + `self.actualTab`
            s1 = 'scp -P ' + dicFilename['PORT'] + ' ' + dicFilename['USER'] + '@' + dicFilename['HOST'] + '://'
            s1 += dicFilename['NAME'] + ' ' + dicFilename['TMPNAME']
            os.system(s1)
            filename = dicFilename['TMPNAME']
        else:
            filename = dicFilename['NAME']
        infile = open(filename, "r")
        if infile:
            self.textbuffer.set_text(infile.read())
            infile.close()
        #self.dicCurrentFilename = filename
        self.win1.set_title(self.dicCurrentFilename['NAME'])
        #sSuffix = dicFilename['NAME'] [dicFilename['NAME'] .rfind('.')+1:len(dicFilename['NAME'] )]
        #print 'SSuffix = ', sSuffix.lower()
        #dicSuffix = {'xml':'application/xml'}
        # Pick the syntax-highlighting language from the file's mime type.
        self.textbuffer = self.setTextBufferLanguage(self.textbuffer,self.checkMimeType(dicFilename['NAME']) , True)
    def save_as_item_clicked(self, data=None):
        "Creates a file chooser and allows the user to save to it"
        self.filesel = gtk.FileSelection("Save As...")
        self.filesel.ok_button.connect_object("clicked", self.save_selected_file, None)
        self.filesel.cancel_button.connect("clicked", lambda w: self.filesel.destroy())
        self.filesel.show()
    def save_selected_file(self, data=None):
        "Saves the selected file"
        # NOTE(review): here dicCurrentFilename is set to a plain string,
        # while the rest of the class treats it as a dict — confirm.
        self.save_file(self.filesel.get_filename(), self.get_text())
        self.dicCurrentFilename = self.filesel.get_filename()
        self.window.set_title(self.dicCurrentFilename)
        self.filesel.destroy()
    def save_file(self, dicFilename, data):
        "Saves the data to the file located by the filename"
        print 'save this ', dicFilename
        if dicFilename['TYPE'] == 'SSH':
            sFile = dicFilename['TMPNAME']
        else:
            sFile = dicFilename['NAME']
        outfile = open(sFile, "w")
        if outfile:
            outfile.write(data)
            outfile.close()
        #mark as unmodified since last save
        self.textbuffer.set_modified(False)
        if dicFilename['TYPE'] == 'SSH':
            # Copy the temp file next to cwd, push it back with scp, then
            # remove the local copy again.
            os.system('cp -f ' + dicFilename['TMPNAME'] +' ' + os.path.basename(dicFilename['NAME'] ) )
            s1 = 'scp -P ' + dicFilename['PORT'] + ' ' + os.path.basename(dicFilename['NAME'] ) + ' ' + dicFilename['USER'] + '@' + dicFilename['HOST'] + '://'
            s1 += os.path.dirname(dicFilename['NAME'] )
            print s1
            os.system(s1)
            os.system('rm ' + os.path.basename(dicFilename['NAME'] ) )
        print 'Files saved'
    def save_current_file(self, data=None):
        "Saves the text to the current file"
        if self.dicCurrentFilename['NAME'] != "/new":
            self.save_file(self.dicCurrentFilename, self.get_text())
        else:
            self.save_as_item_clicked()
    # edit menu
    def on_undo1_activate(self, event):
        self.textbuffer.undo ()
    def on_redo1_activate(self, event):
        self.textbuffer.redo ()
    def on_cut1_activate(self, event):
        self.textbuffer.cut_clipboard(self.clipboard, self.view.get_editable())
    def on_copy1_activate(self, event):
        self.textbuffer.copy_clipboard(self.clipboard)
    def on_paste1_activate(self, event):
        self.textbuffer.paste_clipboard(self.clipboard,None, self.view.get_editable())
    def on_quicksearch_activate(self, event):
        print 'quicksearch'
        # Ask the user for a word, then jump to its first occurrence.
        cd = cuon.Misc.cuon_dialog.cuon_dialog()
        ok, self.findValue = cd.inputLine( 'Quick Search', 'Search for this word')
        print ok, self.findValue
        if self.findValue:
            position = self.searchText(self.findValue, None)
    def on_search_again(self, event):
        # Continue the previous search from the end of the last match.
        position = self.searchText(self.findValue, self.match_end)
    # Menu Tools
    def on_xml_beautifier_activate(self, event):
        self.xmlBeautifier(self.get_text())
    # toolbar buttons
    def on_tbQuit_clicked(self, event):
        self.activateClick('quit1')
    def on_tbNew_clicked(self, event):
        self.activateClick('new1')
    def on_tbOpen_clicked(self, event):
        self.activateClick('open1')
    def on_tbSave_clicked(self, event):
        self.activateClick('save1')
    def on_tbUndo_clicked(self, event):
        self.activateClick('undo1')
    def on_tbRedo_clicked(self, event):
        self.activateClick('redo1')
    def on_tbCut_clicked(self, event):
        self.activateClick('cut1')
    def on_tbCopy_clicked(self, event):
        self.activateClick('copy1')
    def on_tbPaste_clicked(self, event):
        self.activateClick('paste1')
    def on_tbQuickSearch_clicked(self, event):
        print 'tb search'
        self.activateClick('QuickSearch')
    def on_tbFindNext_clicked(self, event):
        print 'tb search again'
        self.activateClick('FindNext')
    def searchText(self, sFind, iter = None):
        """Case-insensitive forward search for *sFind* starting at *iter*
        (or the buffer start); selects and scrolls to the match."""
        if not iter:
            start_iter = self.textbuffer.get_start_iter()
        else:
            start_iter = iter
        position = gtksourceview.iter_forward_search(start_iter, sFind, gtksourceview.SEARCH_CASE_INSENSITIVE)
        try:
            print position
            self.match_start, self.match_end = position
            self.textbuffer.place_cursor(self.match_start)
            self.textbuffer.select_range(self.match_start, self.match_end)
            self.view.scroll_to_iter(self.match_start, 0.0)
        except:
            # No match: unpacking position failed; reset the search state.
            self.match_start = None
            self.findValue = None
        return position
    def plugin_item_clicked(self, data=None):
        "Creates a file chooser and allows user to open a file with it"
        self.filesel = gtk.FileSelection("Select plugin to open...")
        self.filesel.ok_button.connect_object("clicked", self.open_selected_plugin, None)
        self.filesel.cancel_button.connect("clicked", lambda w: self.filesel.destroy())
        self.filesel.show()
    def setLanguage(self, mType):
        """Switch the source buffer's highlighting language to the one
        registered for mime type *mType*."""
        manager = self.textbuffer.get_data('languages-manager')
        #print manager.get_available_languages()
        mime_type = mType
        language = manager.get_language_from_mime_type(mime_type)
        self.textbuffer.set_highlight(True)
        self.textbuffer.set_language(language)
    def open_selected_plugin(self, data=None):
        "Opens the selected plugin file and reads it in"
        self.open_plugin(self.filesel.get_filename())
        self.filesel.destroy()
    def open_plugin(self, filename):
        "Opens the file given in filename and reads it in"
        # WARNING: exec()s arbitrary file contents — only open trusted plugins.
        infile = open(filename, "r")
        if infile:
            command = infile.read()
            infile.close()
            exec(command)
    def enable_wrap(self,data=None):
        "Enables word wrap and changes menu item appropriately"
        # NOTE(review): uses self.textview/self.edit_menu, which are not set
        # up by this class — confirm these handlers are actually reachable.
        self.textview.set_wrap_mode(gtk.WRAP_WORD)
        self.word_wrap_item.destroy()
        self.word_wrap_item=gtk.MenuItem("Disable _Word Wrap")
        self.word_wrap_item.connect_object("activate",self.disable_wrap,None)
        self.edit_menu.append(self.word_wrap_item)
        self.word_wrap_item.show()
    def disable_wrap(self,data=None):
        "Disables word wrap and changes menu item appropriately"
        self.textview.set_wrap_mode(gtk.WRAP_NONE)
        self.word_wrap_item.destroy()
        self.word_wrap_item=gtk.MenuItem("Enable _Word Wrap")
        self.word_wrap_item.connect_object("activate",self.enable_wrap,None)
        self.edit_menu.append(self.word_wrap_item)
        self.word_wrap_item.show()
    def xmlBeautifier(self, data):
        """Re-indent the XML in *data* and write it back to the buffer.

        Splits on tags and indents by nesting level (intendCols spaces).
        """
        preserveCDATA = 1
        intendCols = 4
        fields = re.split('(<.*?>)',data)
        level = 0
        cdataFlag=0
        s = ''
        if len(fields) > 2:
            for f in fields:
                if f.strip() == '': continue
                if preserveCDATA :
                    # rejoin splitted CDATA-Tags which contains HTML-Tags
                    # NOTE(review): this branch compares f[:8] against an
                    # empty string and uses an undefined name `cdata`; the
                    # CDATA marker text appears to have been lost — a taken
                    # branch would raise NameError.  Verify against upstream.
                    if f[:8] == '' :
                        cdataFlag=0
                        s += ' '*(level*intendCols) + cdata + '\n'
                        continue
                if f[0]=='<' and f[1] != '/' and f[1] != '!' :
                    # Opening tag: print, then indent children one level.
                    s += ' '*(level*intendCols) + f + '\n'
                    level = level + 1
                    if f[-2:] == '/>':
                        # Self-closing tag opens no scope.
                        level = level - 1
                elif f[:2]=='</':
                    # Closing tag: dedent before printing.
                    level = level - 1
                    s += ' '*(level*intendCols) + f + '\n'
                else:
                    s += ' '*(level*intendCols) + f + '\n'
        self.textbuffer.set_text(s)
        return True
| BackupTheBerlios/cuon-svn | cuon_client/cuon/Editor/editor.py | Python | gpl-3.0 | 14,061 |
"""Component for handling Air Quality data for your location."""
from datetime import timedelta
import logging
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
)
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
_LOGGER = logging.getLogger(__name__)
# State-attribute names for each supported pollutant / reading.
ATTR_AQI = "air_quality_index"
ATTR_CO2 = "carbon_dioxide"
ATTR_CO = "carbon_monoxide"
ATTR_N2O = "nitrogen_oxide"
ATTR_NO = "nitrogen_monoxide"
ATTR_NO2 = "nitrogen_dioxide"
ATTR_OZONE = "ozone"
ATTR_PM_0_1 = "particulate_matter_0_1"
ATTR_PM_10 = "particulate_matter_10"
ATTR_PM_2_5 = "particulate_matter_2_5"
ATTR_SO2 = "sulphur_dioxide"
DOMAIN = "air_quality"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
# Default polling interval for air_quality entities.
SCAN_INTERVAL = timedelta(seconds=30)
# Maps AirQualityEntity property names to the attribute names published in
# the entity's state attributes (see AirQualityEntity.state_attributes).
PROP_TO_ATTR = {
    "air_quality_index": ATTR_AQI,
    "attribution": ATTR_ATTRIBUTION,
    "carbon_dioxide": ATTR_CO2,
    "carbon_monoxide": ATTR_CO,
    "nitrogen_oxide": ATTR_N2O,
    "nitrogen_monoxide": ATTR_NO,
    "nitrogen_dioxide": ATTR_NO2,
    "ozone": ATTR_OZONE,
    "particulate_matter_0_1": ATTR_PM_0_1,
    "particulate_matter_10": ATTR_PM_10,
    "particulate_matter_2_5": ATTR_PM_2_5,
    "sulphur_dioxide": ATTR_SO2,
}
async def async_setup(hass, config):
    """Set up the air quality component."""
    component = EntityComponent(_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
    # Store the component so config entries can reach it later.
    hass.data[DOMAIN] = component
    await component.async_setup(config)
    return True
async def async_setup_entry(hass, entry):
    """Set up a config entry."""
    component = hass.data[DOMAIN]
    return await component.async_setup_entry(entry)
async def async_unload_entry(hass, entry):
    """Unload a config entry."""
    component = hass.data[DOMAIN]
    return await component.async_unload_entry(entry)
class AirQualityEntity(Entity):
    """Abstract base class for entities reporting air quality readings.

    Subclasses must implement ``particulate_matter_2_5``; every other
    pollutant property defaults to ``None`` (reading not available).
    """

    @property
    def particulate_matter_2_5(self):
        """Return the particulate matter 2.5 level."""
        raise NotImplementedError()

    @property
    def particulate_matter_10(self):
        """Return the particulate matter 10 level."""
        return None

    @property
    def particulate_matter_0_1(self):
        """Return the particulate matter 0.1 level."""
        return None

    @property
    def air_quality_index(self):
        """Return the Air Quality Index (AQI)."""
        return None

    @property
    def ozone(self):
        """Return the O3 (ozone) level."""
        return None

    @property
    def carbon_monoxide(self):
        """Return the CO (carbon monoxide) level."""
        return None

    @property
    def carbon_dioxide(self):
        """Return the CO2 (carbon dioxide) level."""
        return None

    @property
    def attribution(self):
        """Return the attribution."""
        return None

    @property
    def sulphur_dioxide(self):
        """Return the SO2 (sulphur dioxide) level."""
        return None

    @property
    def nitrogen_oxide(self):
        """Return the N2O (nitrogen oxide) level."""
        return None

    @property
    def nitrogen_monoxide(self):
        """Return the NO (nitrogen monoxide) level."""
        return None

    @property
    def nitrogen_dioxide(self):
        """Return the NO2 (nitrogen dioxide) level."""
        return None

    @property
    def state_attributes(self):
        """Return the state attributes."""
        # Evaluate each property exactly once; drop unavailable readings.
        readings = (
            (attr, getattr(self, prop)) for prop, attr in PROP_TO_ATTR.items()
        )
        return {attr: value for attr, value in readings if value is not None}

    @property
    def state(self):
        """Return the current state."""
        return self.particulate_matter_2_5

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity."""
        return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
| partofthething/home-assistant | homeassistant/components/air_quality/__init__.py | Python | apache-2.0 | 3,964 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import rest.models
from .base import from_django_model
# SQLAlchemy mirror of the Django WsAuthGroup model, generated so the same
# auth-group table can be queried outside the Django ORM.
WsAuthGroup = from_django_model(rest.models.WsAuthGroup)
| lavalamp-/ws-backend-community | lib/sqlalchemy/models/auth.py | Python | gpl-3.0 | 178 |
import unittest
from fylm.model.timestamp import Timestamps
class MockExperiment(object):
    """Minimal stand-in for an experiment object; tests only read these
    fixed attribute values."""
    def __init__(self):
        self.base_path = None
        self.data_dir = "/tmp/"
        self.field_of_view_count = 2
        self.fields_of_view = [1, 2]
        self.time_periods = [1, 2]
class TimestampsTests(unittest.TestCase):
    """Tests for Timestamps: line parsing, bulk loading and access helpers."""
    def setUp(self):
        self.model = Timestamps()
    def test_parse_line(self):
        index, timestamp = self.model._parse_line("238 4.5356246")
        self.assertEqual(index, 238)
        self.assertAlmostEqual(timestamp, 4.5356246)
    def test_parse_line_invalid(self):
        # A line without a timestamp field cannot be parsed.
        with self.assertRaises(AttributeError):
            self.model._parse_line("238")
    def test_parse_line_empty(self):
        with self.assertRaises(AttributeError):
            self.model._parse_line("")
    def test_load(self):
        self.model.load(["1 2.222", "2 4.444", "3 8.888"])
        self.assertDictEqual(self.model._timestamps,
                             {1: 2.222, 2: 4.444, 3: 8.888})
    def test_load_one_bad(self):
        # A completely malformed line is skipped without losing the rest.
        self.model.load(["1 2.222", "2 4.444",
                         "Horrendous Surprise!", "3 8.888"])
        self.assertDictEqual(self.model._timestamps,
                             {1: 2.222, 2: 4.444, 3: 8.888})
    def test_load_one_invalid_type(self):
        # A non-numeric timestamp drops only that entry.
        self.model.load(["1 2.222", "2 Fourpointfourseconds", "3 8.888"])
        self.assertDictEqual(self.model._timestamps, {1: 2.222, 3: 8.888})
    def test_lines(self):
        self.model._timestamps = {3: 8.888, 1: 2.222, 2: 4.444}
        rendered = sorted(self.model.lines)
        self.assertListEqual(["1 2.222", "2 4.444", "3 8.888"], rendered)
    def test_add(self):
        for value in ("2.222", "4.444", "8.888"):
            self.model.add(value)
        self.assertDictEqual(self.model._timestamps,
                             {1: 2.222, 2: 4.444, 3: 8.888})
        self.model.add("12.222")
        self.assertDictEqual(self.model._timestamps,
                             {1: 2.222, 2: 4.444, 3: 8.888, 4: 12.222})
    def test_last(self):
        self.model._timestamps = {3: 8.888, 1: 2.222, 2: 4.444}
        self.assertEqual(self.model.last, 8.888)
    def test_last_none(self):
        self.model._timestamps = {}
        with self.assertRaises(ValueError):
            self.model.last
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =============enthought library imports========================
from __future__ import absolute_import
import os
from traits.api import (
Enum,
Float,
Event,
Property,
Int,
Button,
Bool,
Str,
Any,
on_trait_change,
String,
)
from traitsui.api import (
View,
HGroup,
Item,
Group,
VGroup,
EnumEditor,
RangeEditor,
ButtonEditor,
spring,
)
# from pyface.timer.api import Timer
# =============standard library imports ========================
# import sys, os
# =============local library imports ==========================
# sys.path.insert(0, os.path.join(os.path.expanduser('~'),
# 'Programming', 'mercurial', 'pychron_beta'))
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.graph.time_series_graph import TimeSeriesStreamStackedGraph
from pychron.graph.plot_record import PlotRecord
import time
from pychron.hardware.core.core_device import CoreDevice
from pychron.hardware.meter_calibration import MeterCalibration
from pychron.core.helpers.filetools import parse_file
from pychron.hardware.watlow.base_ezzone import BaseWatlowEZZone
class WatlowEZZone(BaseWatlowEZZone, CoreDevice):
    """
    WatlowEZZone represents a WatlowEZZone PM PID controller.
    this class provides human readable methods for setting the modbus registers
    """
    # Stacked time-series graph: temperature stream + heat-power stream.
    graph_klass = TimeSeriesStreamStackedGraph
    # UI traits: refresh/configure buttons and autotune toggle state.
    refresh = Button
    autotune = Event
    autotune_label = Property(depends_on="autotuning")
    autotuning = Bool
    configure = Button
    # ===============================================================================
    # handlers
    # ===============================================================================
    def _autotune_fired(self):
        # Toggle button: start autotune when idle, stop it when running.
        if self.autotuning:
            self.stop_autotune()
        else:
            self.start_autotune()
        self.autotuning = not self.autotuning
    def _configure_fired(self):
        self.edit_traits(view="autotune_configure_view")
    def _refresh_fired(self):
        # Re-read the controller state (implemented on the base class).
        self.initialization_hook()
    def graph_builder(self, g, **kw):
        """Build two stacked stream plots: temperature (0) and heat power (1)."""
        g.new_plot(padding_left=40, padding_right=5, zoom=True, pan=True, **kw)
        g.new_plot(padding_left=40, padding_right=5, zoom=True, pan=True, **kw)
        g.new_series()
        g.new_series(plotid=1)
        g.set_y_title("Temp (C)")
        g.set_y_title("Heat Power (%)", plotid=1)
    def get_control_group(self):
        """Return the TraitsUI group for closed/open loop setpoint control."""
        # Closed-loop controls: optional PID/calibration and a setpoint slider.
        closed_grp = VGroup(
            HGroup(
                Item(
                    "use_pid_bin",
                    label="Set PID",
                    tooltip="Set PID parameters based on setpoint",
                ),
                Item("use_calibrated_temperature", label="Use Calibration"),
                Item(
                    "coeff_string",
                    show_label=False,
                    enabled_when="use_calibrated_temperature",
                ),
            ),
            Item(
                "closed_loop_setpoint",
                style="custom",
                label="setpoint",
                editor=RangeEditor(
                    mode="slider",
                    format="%0.2f",
                    low_name="setpointmin",
                    high_name="setpointmax",
                ),
            ),
            visible_when='control_mode=="closed"',
        )
        # Open-loop controls: raw output setpoint slider.
        open_grp = VGroup(
            Item(
                "open_loop_setpoint",
                label="setpoint",
                editor=RangeEditor(
                    mode="slider", format="%0.2f", low_name="olsmin", high_name="olsmax"
                ),
                visible_when='control_mode=="open"',
            )
        )
        tune_grp = HGroup(
            Item("enable_tru_tune"),
            Item(
                "tru_tune_gain",
                label="Gain",
                tooltip="1:Most overshot, 6:Least overshoot",
            ),
        )
        cg = VGroup(
            HGroup(
                Item("control_mode", editor=EnumEditor(values=["closed", "open"])),
                Item("max_output", label="Max Output %", format_str="%0.1f"),
                icon_button_editor("advanced_values_button", "cog"),
            ),
            tune_grp,
            closed_grp,
            open_grp,
        )
        return cg
    def _advanced_values_button_fired(self):
        self.edit_traits(view="configure_view")
    def get_configure_group(self):
        """Return the TraitsUI group for autotune, I/O scaling and PID setup."""
        output_grp = VGroup(
            Item("output_scale_low", format_str="%0.3f", label="Scale Low"),
            Item("output_scale_high", format_str="%0.3f", label="Scale High"),
            label="Output",
            show_border=True,
        )
        autotune_grp = HGroup(
            Item(
                "autotune",
                show_label=False,
                editor=ButtonEditor(label_value="autotune_label"),
            ),
            Item("configure", show_label=False, enabled_when="not autotuning"),
            label="Autotune",
            show_border=True,
        )
        # Sensor/thermocouple selection; scaling items only apply to the
        # process-input sensor types (codes 104 and 112).
        input_grp = Group(
            VGroup(
                Item(
                    "sensor1_type",
                    # editor=EnumEditor(values=sensor_map),
                    show_label=False,
                ),
                Item(
                    "thermocouple1_type",
                    # editor=EnumEditor(values=tc_map),
                    show_label=False,
                    visible_when="_sensor1_type==95",
                ),
                Item(
                    "input_scale_low",
                    format_str="%0.3f",
                    label="Scale Low",
                    visible_when="_sensor1_type in [104,112]",
                ),
                Item(
                    "input_scale_high",
                    format_str="%0.3f",
                    label="Scale High",
                    visible_when="_sensor1_type in [104,112]",
                ),
            ),
            label="Input",
            show_border=True,
        )
        # Separate heat (Ph) and cool (Pc) proportional bands plus I and D.
        pid_grp = VGroup(
            HGroup(Item("Ph", format_str="%0.3f"), Item("Pc", format_str="%0.3f")),
            Item("I", format_str="%0.3f"),
            Item("D", format_str="%0.3f"),
            show_border=True,
            label="PID",
        )
        return Group(
            HGroup(spring, Item("refresh", show_label=False)),
            autotune_grp,
            HGroup(output_grp, input_grp),
            pid_grp,
        )
    def autotune_configure_view(self):
        """Modal dialog for autotune and TRU-TUNE+ parameters."""
        v = View(
            "autotune_setpoint",
            Item("autotune_aggressiveness", label="Aggressiveness"),
            VGroup(
                "enable_tru_tune",
                Group(
                    Item("tru_tune_band", label="Band"),
                    Item(
                        "tru_tune_gain",
                        label="Gain",
                        tooltip="1:Most overshot, 6:Least overshoot",
                    ),
                    enabled_when="enable_tru_tune",
                ),
                show_border=True,
                label="TRU-TUNE+",
            ),
            title="Autotune Configuration",
            kind="livemodal",
        )
        return v
    def control_view(self):
        return View(self.get_control_group())
    def configure_view(self):
        return View(self.get_configure_group())
    def _get_autotune_label(self):
        # Button label mirrors the current autotune state.
        return "On" if self.autotuning else "Off"
# Manual smoke test: connect to the configured controller and open the
# configuration dialog.
if __name__ == "__main__":
    from pychron.core.helpers.logger_setup import logging_setup

    logging_setup("watlowezzone")
    w = WatlowEZZone(name="temperature_controller", configuration_dir_name="diode")
    w.bootstrap()
    w.configure_traits(view="configure_view")
| USGSDenverPychron/pychron | pychron/hardware/watlow/watlow_ezzone.py | Python | apache-2.0 | 8,596 |
import re
import random
from discord.ext import commands
from . import util
# Matches one table line: optional integer weight followed by '|', then the
# item text.  Group 1 is the weight (or None for weight 1), group 2 the item.
table_expression = re.compile(r'^\s*(?:(\d+)\s*\|\s*)?(.*)\s*$')
class TableCategory (util.Cog):
    @commands.command()
    async def choose(self, ctx, *, table: str):
        '''
        Randomly choose an item from a list
        This command has special formatting:
        Starting on the line after the command name, each line is an item
        Items can have a 'weight' by putting a number followed by a pipe `|` at the beginning of the line
        Lines without a specified weight have a weight of 1
        e.x. (would have a 25% chance of Item One, a 50% chance of Item Two, and a 25% chance of Item Three):
        ;choose
        Item one
        2 | Item two
        Item three
        '''
        # Build the weighted pool: each item appears `weight` times.
        options = []
        for line in table.splitlines():
            if not line.strip():
                # Blank lines would otherwise match the regex and add an
                # empty, invisible option; skip them.
                continue
            match = table_expression.match(line)
            if match:
                count, item = match.groups()
                count = 1 if count is None else int(count)
                options.extend([item] * count)
            else:
                raise Exception('Misformatted item: {}'.format(line))
        if not options:
            # random.choice([]) raises a bare IndexError; report something
            # the user can act on instead.
            raise Exception('No items to choose from')
        final = random.choice(options)
        await util.send_embed(ctx, author=False, fields=[('Randomly chosen:', final)])
def setup(bot):
    """Extension entry point called by discord.py's load_extension."""
    cog = TableCategory(bot)
    bot.add_cog(cog)
| b-hodges/dice-bot | dicebot/cogs/tables.py | Python | mit | 1,339 |
# -*- coding: utf-8 -*-
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
class SuccessDeleteMessageMixin(SuccessMessageMixin):
    """
    Mixin that propagates a success message on delete actions.

    Django's SuccessMessageMixin only hooks form_valid(), so delete views
    never emit their ``success_message``; this mixin emits it from
    delete() before delegating to the parent class.
    """
    def get_success_message(self):
        # Hook point: subclasses may override to build a dynamic message.
        return self.success_message

    def delete(self, request, *args, **kwargs):
        success_message = self.get_success_message()
        if success_message:
            messages.success(request, success_message)
        return super(SuccessDeleteMessageMixin, self).delete(request,
                                                            *args, **kwargs)
| gilsondev/balin | balin/utils/views/mixins.py | Python | mit | 681 |
"""add-hikes
Revision ID: f7888bd46c75
Revises: fc92ba2ffd7f
Create Date: 2017-02-16 07:36:06.108806
"""
# revision identifiers, used by Alembic.
revision = 'f7888bd46c75'
# Must match the revision id of the migration applied immediately before
# this one in the chain.
down_revision = 'fc92ba2ffd7f'
from alembic import op
import sqlalchemy as sa
import geoalchemy2
def upgrade():
    """Create the hike_destination and hike tables."""
    ### commands auto generated by Alembic - please adjust! ###
    # A destination is a named high point (usually a summit) whose location
    # is stored as a PostGIS POINT geometry.
    op.create_table('hike_destination',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=100), nullable=False),
    sa.Column('altitude', sa.Integer(), nullable=True),
    sa.Column('high_point_coord', geoalchemy2.types.Geometry(geometry_type='POINT'), nullable=False),
    sa.Column('is_summit', sa.Boolean(), server_default='t', nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text(u'now()'), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # One hike of a destination; 'method' is constrained to a fixed set of
    # travel methods via a CHECK constraint.
    op.create_table('hike',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('destination_id', sa.Integer(), nullable=False),
    sa.Column('datetime', sa.DateTime(), server_default=sa.text(u'now()'), nullable=False),
    sa.Column('method', sa.String(length=30), nullable=False),
    sa.Column('notes', sa.Text(), server_default='', nullable=False),
    sa.CheckConstraint(u"method in ('ski', 'foot', 'crampons', 'climb', 'via ferrata')"),
    sa.ForeignKeyConstraint(['destination_id'], [u'hike_destination.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the hike tables; hike first because it references hike_destination."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('hike')
    op.drop_table('hike_destination')
    ### end Alembic commands ###
| thusoy/blag | blag/migrations/versions/f7888bd46c75_add_hikes.py | Python | mit | 1,664 |
import numpy as np
| fourtriple/bt-dist | trilateration.py | Python | gpl-3.0 | 22 |
from os import listdir
from os.path import isfile, join
import paer
# Directory containing the recorded AER event files to convert.
mypath = 'aedat/'
# Process only regular files with the .aedat extension.
onlyfiles = [f for f in listdir(mypath)
             if isfile(join(mypath, f)) and f.endswith('.aedat')]
for filename in onlyfiles:  # renamed from 'file', which shadowed the builtin
    ae = paer.aefile(join(mypath, filename))
    # Downsample the events to a 16x16 grid and dump PNG frames, one every
    # 3000 events, into more_images/temp with a per-file name prefix.
    aed = paer.aedata(ae).downsample((16, 16))
    paer.create_pngs(aed, '16x16_' + filename + '_',
                     path='more_images/temp', step=3000, dim=(16, 16))
| darioml/fyp-public | data/make_pngs.py | Python | gpl-2.0 | 394 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Copyright 2004 Duke University
# Written by Seth Vidal <skvidal at phy.duke.edu>
"""
Classes and functions dealing with rpm package representations.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import misc
import re
import fnmatch
def buildPkgRefDict(pkgs, casematch=True):
"""take a list of pkg objects and return a dict the contains all the possible
naming conventions for them eg: for (name,i386,0,1,1)
dict[name] = (name, i386, 0, 1, 1)
dict[name.i386] = (name, i386, 0, 1, 1)
dict[name-1-1.i386] = (name, i386, 0, 1, 1)
dict[name-1] = (name, i386, 0, 1, 1)
dict[name-1-1] = (name, i386, 0, 1, 1)
dict[0:name-1-1.i386] = (name, i386, 0, 1, 1)
dict[name-0:1-1.i386] = (name, i386, 0, 1, 1)
"""
pkgdict = {}
for pkg in pkgs:
(n, a, e, v, r) = pkg.pkgtup
if not casematch:
n = n.lower()
a = a.lower()
e = e.lower()
v = v.lower()
r = r.lower()
name = n
nameArch = '%s.%s' % (n, a)
nameVerRelArch = '%s-%s-%s.%s' % (n, v, r, a)
nameVer = '%s-%s' % (n, v)
nameVerRel = '%s-%s-%s' % (n, v, r)
envra = '%s:%s-%s-%s.%s' % (e, n, v, r, a)
nevra = '%s-%s:%s-%s.%s' % (n, e, v, r, a)
for item in [name, nameArch, nameVerRelArch, nameVer, nameVerRel, envra, nevra]:
if item not in pkgdict:
pkgdict[item] = []
pkgdict[item].append(pkg)
return pkgdict
def parsePackages(pkgs, usercommands, casematch=0):
    """Match user requests against a package list.

    For installs/updates *pkgs* should be the available packages; for
    removes it should be the installed packages.  Returns a 3-tuple of
    sets ``(exactmatch, matched, unmatched)`` where *exactmatch* holds
    packages matched by a literal name form, *matched* holds packages
    matched via a glob pattern, and *unmatched* holds the commands that
    matched nothing.  *casematch* controls case-sensitive matching and
    defaults to case-insensitive.
    """
    pkgdict = buildPkgRefDict(pkgs, bool(casematch))
    exactmatch = set()
    matched = set()
    unmatched = set()
    for command in usercommands:
        if not casematch:
            command = command.lower()
        if command in pkgdict:
            exactmatch.update(pkgdict[command])
            del pkgdict[command]
        else:
            # anything we couldn't find a match for
            # could mean it's not there, could mean it's a wildcard
            if misc.re_glob(command):
                # Bug fix: iterate over a snapshot of the keys.  The old code
                # iterated pkgdict.keys() directly while deleting entries,
                # which raises "dictionary changed size during iteration" on
                # Python 3.
                trylist = list(pkgdict)
                # command and pkgdict are already lowered if not casematch
                # so case sensitive is always fine
                restring = fnmatch.translate(command)
                regex = re.compile(restring)
                foundit = False
                for item in trylist:
                    if regex.match(item):
                        matched.update(pkgdict[item])
                        del pkgdict[item]
                        foundit = True
                if not foundit:
                    unmatched.add(command)
            else:
                unmatched.add(command)
    return exactmatch, matched, unmatched
| jsilhan/dnf | dnf/yum/packages.py | Python | gpl-2.0 | 3,808 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: ts=4
###
#
# CommieCC is the legal property of J. Félix Ontañón <felixonta@gmail.com>
# Copyright (c) 2009 J. Félix Ontañón
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
###
import gobject
import time
import datetime
import logging
import pwd
import dbus
from dbus.mainloop.glib import DBusGMainLoop
from utils import LOG_MAIN
# D-Bus well-known names and object paths used throughout this module.
# Generic properties interface, used with dbus.Interface(..., PROP_IFACE_URI).
PROP_IFACE_URI = 'org.freedesktop.DBus.Properties'
# NetworkManager service, object path and derived interface names.
NM_URI = 'org.freedesktop.NetworkManager'
NM_PATH = '/org/freedesktop/NetworkManager'
NM_IFACE_URI = NM_URI
NM_ACTIVE_CONN_IFACE_URI = NM_URI + '.Connection.Active'
NM_DEVICE_IFACE_URI = NM_URI + '.Device'
NM_WIRED_IFACE_URI = NM_DEVICE_IFACE_URI + '.Wired'
NM_WIRELESS_IFACE_URI = NM_DEVICE_IFACE_URI + '.Wireless'
# ConsoleKit service, paths and interfaces (session/seat tracking).
CK_URI = 'org.freedesktop.ConsoleKit'
CK_PATH = '/org/freedesktop/ConsoleKit'
CK_MANAGER_URI = CK_URI + '.Manager'
CK_MANAGER_PATH = CK_PATH + '/Manager'
SEAT_IFACE_URI = 'org.freedesktop.ConsoleKit.Seat'
SESSION_IFACE_URI = 'org.freedesktop.ConsoleKit.Session'
# Install the GLib main loop as the default D-Bus main loop so that
# signal receivers registered below are dispatched by gobject.MainLoop.
DBusGMainLoop(set_as_default=True)
class EventDispatcher(gobject.GObject):
    # Central GObject signal hub: controllers emit on the module-level
    # Dispatcher instance and interested parties connect() to it.
    # Signal flavour carrying a single arbitrary Python object.
    SIGNAL = (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
        (gobject.TYPE_PYOBJECT,))
    # Signal flavour carrying three strings (username, creation_time, display).
    SIGNAL3 = (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
        (gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING))
    # __gsignals__ is consumed by the GObject metaclass at class-creation
    # time to register these signals; do not rename the keys without also
    # updating every emit()/connect() site.
    __gsignals__ = {
        'new_x11_session' : SIGNAL3,
        'losed_x11_session' : SIGNAL3,
        'connection_changed': SIGNAL,
        'locking_changed': SIGNAL,
    }
    def __init__(self):
        gobject.GObject.__init__(self)
# Module-wide singleton used by NMController/CKController below.
Dispatcher = EventDispatcher()
class NMController():
    """Watch NetworkManager over D-Bus and emit ``connection_changed`` on the
    module-level Dispatcher whenever the set of active interfaces changes."""

    def __init__(self):
        self.logger = logging.getLogger(LOG_MAIN)
        # Remember the current active interfaces so we can detect changes.
        self.active_ifaces = self.get_active_ifaces()
        bus = dbus.SystemBus()
        bus.add_signal_receiver(self.__prop_changed, 'PropertiesChanged',
            NM_ACTIVE_CONN_IFACE_URI)

    def __prop_changed(self, props):
        # State == 2 is NM's "activated" state for an active connection;
        # 'Default' flips when the connection becomes the default route.
        if (props.has_key('State') and props['State'] == 2) or \
            (props.has_key('Default') and props['Default']):
            current_ifaces = self.get_active_ifaces()
            if current_ifaces != self.active_ifaces:
                self.active_ifaces = current_ifaces
                self.logger.info('New connection available')
                Dispatcher.emit('connection_changed', self.active_ifaces)

    @staticmethod
    def get_active_ifaces():
        """Return ``{interface name: MAC address}`` for every active wired or
        wireless device of every active NetworkManager connection."""
        iface = {}
        bus = dbus.SystemBus()
        nm_iface = dbus.Interface(bus.get_object(NM_URI, NM_PATH),
            dbus_interface=PROP_IFACE_URI)
        active_conns_path = nm_iface.Get(NM_IFACE_URI, 'ActiveConnections')
        for active_conn_path in active_conns_path:
            active_conn_iface = dbus.Interface(bus.get_object(NM_URI,
                active_conn_path), dbus_interface=PROP_IFACE_URI)
            active_devs_path = active_conn_iface.Get(NM_ACTIVE_CONN_IFACE_URI,
                'Devices')
            for active_dev_path in active_devs_path:
                active_dev_iface = dbus.Interface(bus.get_object(NM_URI,
                    active_dev_path), dbus_interface=PROP_IFACE_URI)
                devicetype = active_dev_iface.Get(NM_DEVICE_IFACE_URI, 'DeviceType')
                interface = active_dev_iface.Get(NM_DEVICE_IFACE_URI, 'Interface')
                # Wired
                if devicetype == 1:
                    hwaddr = active_dev_iface.Get(NM_WIRED_IFACE_URI, 'HwAddress')
                # Wireless
                elif devicetype == 2:
                    hwaddr = active_dev_iface.Get(NM_WIRELESS_IFACE_URI, 'HwAddress')
                else:
                    # Bug fix: previously an unknown device type fell through
                    # and either raised NameError (first device seen) or
                    # recorded the MAC of a previously processed device.
                    continue
                iface[str(interface)] = str(hwaddr)
        return iface

    @staticmethod
    def get_all_macs():
        """Return a list of MAC addresses of all wired/wireless NM devices,
        regardless of whether they are currently active."""
        iface = []
        bus = dbus.SystemBus()
        nm_iface = dbus.Interface(bus.get_object(NM_URI, NM_PATH),
            dbus_interface=NM_URI)
        devices_path = nm_iface.GetDevices()
        for device_path in devices_path:
            device_iface = dbus.Interface(bus.get_object(NM_URI,
                device_path), dbus_interface=PROP_IFACE_URI)
            devicetype = device_iface.Get(NM_DEVICE_IFACE_URI, 'DeviceType')
            interface = device_iface.Get(NM_DEVICE_IFACE_URI, 'Interface')
            # Wired
            if devicetype == 1:
                hwaddr = device_iface.Get(NM_WIRED_IFACE_URI, 'HwAddress')
            # Wireless
            elif devicetype == 2:
                hwaddr = device_iface.Get(NM_WIRELESS_IFACE_URI, 'HwAddress')
            else:
                # Bug fix: skip devices that are neither wired nor wireless
                # instead of appending a stale/undefined hwaddr.
                continue
            iface.append(str(hwaddr))
        return iface
class CKController():
    # Tracks local X11 sessions via ConsoleKit and emits
    # 'new_x11_session' / 'losed_x11_session' on the module Dispatcher.
    def __init__(self):
        # session object path -> (username, creation_time, display)
        self.active_sessions = {}
        self.bus = dbus.SystemBus()
        self.logger = logging.getLogger(LOG_MAIN)
        ck_manager = dbus.Interface(self.bus.get_object(CK_URI, CK_MANAGER_PATH),
            dbus_interface=CK_MANAGER_URI)
        # Subscribe to session add/remove on every seat and announce the
        # sessions that already exist at startup.
        for seat_path in ck_manager.GetSeats():
            seat = dbus.Interface(self.bus.get_object(CK_URI, seat_path),
                dbus_interface=SEAT_IFACE_URI)
            seat.connect_to_signal('SessionAdded', self.new_session)
            seat.connect_to_signal('SessionRemoved', self.closed_session)
            for session_path in seat.GetSessions():
                session = dbus.Interface(self.bus.get_object(CK_URI, session_path),
                    dbus_interface=SESSION_IFACE_URI)
                ret = self.local_x11_user(session)
                if ret:
                    username, creation_time, display = [str(x) for x in ret]
                    Dispatcher.emit('new_x11_session', username, creation_time,
                        display)
                    self.active_sessions[session_path] = (username, creation_time,
                        display)
    def local_x11_user(self, session):
        # Return (username, creation_time, display) for a local session that
        # has an X11 display attached, or None for remote/non-X11 sessions.
        if session.IsLocal() and session.GetX11Display():
            pwduser = pwd.getpwuid(session.GetUnixUser())
            username = pwduser.pw_name
            creation_time = session.GetCreationTime()
            display = session.GetX11Display()
            return username, creation_time, display
        return None
    def new_session(self, session_path):
        # ConsoleKit 'SessionAdded' handler: announce and record the session.
        session = dbus.Interface(self.bus.get_object(CK_URI, session_path),
            dbus_interface=SESSION_IFACE_URI)
        ret = self.local_x11_user(session)
        if ret:
            username, creation_time, display = [str(x) for x in ret]
            Dispatcher.emit('new_x11_session', username, creation_time, display)
            self.logger.info('New x11 session found on %s', display)
            self.active_sessions[session_path] = (username, creation_time,
                display)
    def closed_session(self, session_path):
        # ConsoleKit 'SessionRemoved' handler: announce only sessions we were
        # tracking; unknown paths (non-local/non-X11 sessions) are ignored.
        if self.active_sessions.has_key(session_path):
            username, creation_time, display = self.active_sessions.pop(session_path)
            Dispatcher.emit('losed_x11_session', username, creation_time, display)
            self.logger.info('Closed x11 session on %s', display)
        else:
            pass
# Manual smoke test: print discovered sessions/interfaces and run the
# GLib main loop so D-Bus signals keep arriving (Ctrl-C to stop).
if __name__ == '__main__':
    # NOTE(review): 'new_x11_session'/'losed_x11_session' are declared with
    # three string arguments (SIGNAL3) but these handlers accept a single
    # 'data' argument — confirm the intended handler signatures.
    def new_x11(dispatcher, data):
        print 'Discovered:', data
    def losed_x11(dispatcher, data):
        print 'Losed:', data
    def changed_connection(dispatcher, data):
        print 'Connections', data
    nm = NMController()
    ck = CKController()
    print ck.active_sessions
    print NMController.get_active_ifaces()
    Dispatcher.connect('new_x11_session', new_x11)
    Dispatcher.connect('losed_x11_session', losed_x11)
    Dispatcher.connect('connection_changed', changed_connection)
    gobject.MainLoop().run()
| amian84/commiecc | commiecc/slavelib/core.py | Python | gpl-3.0 | 8,617 |
# -*- coding: utf-8 -*-
# Copyright(C) 2017 Vincent A
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from weboob.tools.backend import Module
from weboob.capabilities.weather import CapWeather
from .browser import LameteoagricoleBrowser
__all__ = ['LameteoagricoleModule']
class LameteoagricoleModule(Module, CapWeather):
    # Weboob module descriptor for lameteoagricole.net; all real work is
    # delegated to LameteoagricoleBrowser.
    NAME = 'lameteoagricole'
    DESCRIPTION = u'lameteoagricole website'
    MAINTAINER = u'Vincent A'
    EMAIL = 'dev@indigo.re'
    LICENSE = 'AGPLv3+'
    VERSION = '2.1'
    BROWSER = LameteoagricoleBrowser
    def iter_city_search(self, pattern):
        """Yield cities whose name matches *pattern* (CapWeather API)."""
        return self.browser.iter_cities(pattern)
    def get_current(self, city_id):
        """Return the current weather observation for *city_id*."""
        return self.browser.get_current(city_id)
    def iter_forecast(self, city_id):
        """Yield upcoming forecasts for *city_id*."""
        return self.browser.iter_forecast(city_id)
| laurentb/weboob | modules/lameteoagricole/module.py | Python | lgpl-3.0 | 1,524 |
''' -- Imports from python libraries -- '''
import datetime
import json
import pymongo
import re
''' -- imports from installed packages -- '''
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.shortcuts import render_to_response, redirect, render
from django.template import RequestContext
from django.template import TemplateDoesNotExist
from django.template.loader import render_to_string
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from mongokit import paginator
from gnowsys_ndf.ndf.org2any import org2html
from gnowsys_ndf.ndf.models import *
from pymongo import Connection
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
from gnowsys_ndf.ndf.views.methods import get_group_name_id
# Module-level MongoDB collection handles used by the feed classes below.
benchmark_collection = db[Benchmark.collection_name]
analytics_collection = db[Analytics.collection_name]
# NOTE(review): this ObjectId is generated once at import time and appears
# unused in this module — confirm whether it can be removed.
ins_objectid = ObjectId()
class activity_feed(Feed):
    # Django syndication feed of create/edit activity for a group,
    # served at /analytics/<group_id>/summary/.
    title_template = 'ndf/feed_updates_title.html'
    description_template = 'ndf/feed_updates_description.html'
    def title(self, obj):
        # obj is the dict built by get_object(); resolve the group name.
        group_name, group_id = get_group_name_id(obj['group_id'])
        return "Updates for the group : "+ group_name+" @ MetaStudio"
    def link(self, obj):
        group_name, group_id = get_group_name_id(obj['group_id'])
        return '/analytics/' + str(group_id) + '/summary/'
    def description(self, obj):
        group_name, group_id = get_group_name_id(obj['group_id'])
        return "Changes and additions to group : " + group_name
    author_name = 'MetaStudio'
    author_link = 'http://metastudio.org'
    feed_copyright = 'Ⓒ Homi Bhabha Centre for Science Education, TIFR'
    def get_object(self, request, group_id) :
        # Wrap the URL parameter so the title/link/description hooks above
        # can resolve the group lazily.
        data = {}
        data['group_id'] = group_id
        return data
    def get_context_data(self, **kwargs) :
        # Enrich the per-item template context with the node that the
        # analytics record points to and, when possible, its author.
        context = super(activity_feed, self).get_context_data(**kwargs)
        # NOTE(review): .keys()[0] is Python 2 only, and relies on the 'obj'
        # sub-document having exactly one key — confirm against the
        # Analytics schema.
        node = db['Nodes'].find_one({ "_id" : ObjectId(kwargs['item']['obj'][kwargs['item']['obj'].keys()[0]]['id'])})
        try :
            context['node'] = node
            # node may be None; the attribute access below then raises and is
            # swallowed by the outer bare except.
            author = db['Nodes'].find_one({"_type" : "Author", "created_by" : node['created_by']})
            try :
                context['author'] = author
            except :
                pass
        except :
            pass
        return context
    def items(self, obj):
        # Newest-first stream of create/edit analytics records for the group.
        cursor = analytics_collection.find({"action.key" : { '$in' : ['create', 'edit']}, "group_id" : obj['group_id']}).sort("timestamp", -1)
        return cursor
    def item_link(self, item):
        # NOTE(review): every item links to "/" — confirm whether a per-item
        # URL was intended here.
        return "/"
    def item_guid(self, item) :
        # The Mongo _id is unique and stable, so it serves as the GUID.
        return item['_id']
| AvadootNachankar/gstudio | gnowsys-ndf/gnowsys_ndf/ndf/views/feeds.py | Python | agpl-3.0 | 2,596 |
"""Test for RFLink cover components.
Test setup of RFLink covers component/platform. State tracking and
control of RFLink cover devices.
"""
import logging
from homeassistant.components.rflink import EVENT_BUTTON_PRESSED
from homeassistant.const import (
SERVICE_OPEN_COVER,
SERVICE_CLOSE_COVER,
STATE_OPEN,
STATE_CLOSED,
ATTR_ENTITY_ID,
)
from homeassistant.core import callback, State, CoreState
from tests.common import mock_restore_cache
from tests.components.rflink.test_init import mock_rflink
# Entity domain under test; entity ids below are f"{DOMAIN}.<name>".
DOMAIN = "cover"
# Baseline Home Assistant configuration shared by most tests in this file:
# one cover with an alias, plus two extra covers, and two ignored device
# patterns on the rflink component itself.
CONFIG = {
    "rflink": {
        "port": "/dev/ttyABC0",
        "ignore_devices": ["ignore_wildcard_*", "ignore_cover"],
    },
    DOMAIN: {
        "platform": "rflink",
        "devices": {
            "protocol_0_0": {"name": "test", "aliases": ["test_alias_0_0"]},
            "cover_0_0": {"name": "dim_test"},
            "cover_0_1": {"name": "cover_test"},
        },
    },
}
_LOGGER = logging.getLogger(__name__)
async def test_default_setup(hass, monkeypatch):
    """Test all basic functionality of the RFLink cover component."""
    # setup mocking rflink module
    event_callback, create, protocol, _ = await mock_rflink(
        hass, CONFIG, DOMAIN, monkeypatch
    )
    # make sure arguments are passed (ignore_devices from CONFIG)
    assert create.call_args_list[0][1]["ignore"]
    # test default state of cover loaded from config
    cover_initial = hass.states.get(DOMAIN + ".test")
    assert cover_initial.state == STATE_CLOSED
    assert cover_initial.attributes["assumed_state"]
    # cover should follow state of the hardware device by interpreting
    # incoming events for its name and aliases
    # mock incoming command event for this device
    event_callback({"id": "protocol_0_0", "command": "up"})
    await hass.async_block_till_done()
    cover_after_first_command = hass.states.get(DOMAIN + ".test")
    assert cover_after_first_command.state == STATE_OPEN
    # not sure why, but cover have always assumed_state=true
    assert cover_after_first_command.attributes.get("assumed_state")
    # mock incoming command event for this device
    event_callback({"id": "protocol_0_0", "command": "down"})
    await hass.async_block_till_done()
    assert hass.states.get(DOMAIN + ".test").state == STATE_CLOSED
    # should respond to group command
    event_callback({"id": "protocol_0_0", "command": "allon"})
    await hass.async_block_till_done()
    cover_after_first_command = hass.states.get(DOMAIN + ".test")
    assert cover_after_first_command.state == STATE_OPEN
    # should respond to group command
    event_callback({"id": "protocol_0_0", "command": "alloff"})
    await hass.async_block_till_done()
    assert hass.states.get(DOMAIN + ".test").state == STATE_CLOSED
    # test following aliases
    # mock incoming command event for this device alias
    event_callback({"id": "test_alias_0_0", "command": "up"})
    await hass.async_block_till_done()
    assert hass.states.get(DOMAIN + ".test").state == STATE_OPEN
    # test changing state from HA propagates to RFLink
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test"}
        )
    )
    await hass.async_block_till_done()
    assert hass.states.get(DOMAIN + ".test").state == STATE_CLOSED
    # the mocked protocol must have been asked to send DOWN to the device
    assert protocol.send_command_ack.call_args_list[0][0][0] == "protocol_0_0"
    assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN"
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test"}
        )
    )
    await hass.async_block_till_done()
    assert hass.states.get(DOMAIN + ".test").state == STATE_OPEN
    assert protocol.send_command_ack.call_args_list[1][0][1] == "UP"
async def test_firing_bus_event(hass, monkeypatch):
    """Incoming RFLink command events should be put on the HA event bus."""
    cfg = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {
                "protocol_0_0": {
                    "name": "test",
                    "aliases": ["test_alias_0_0"],
                    "fire_event": True,
                }
            },
        },
    }
    # Set up the mocked rflink module for this configuration.
    fire_rflink_event, _, _, _ = await mock_rflink(hass, cfg, DOMAIN, monkeypatch)

    received = []

    @callback
    def capture(event):
        received.append(event)

    hass.bus.async_listen_once(EVENT_BUTTON_PRESSED, capture)

    # An incoming command for the configured device must be republished
    # on the Home Assistant event bus because fire_event is enabled.
    fire_rflink_event({"id": "protocol_0_0", "command": "down"})
    await hass.async_block_till_done()

    assert received[0].data == {"state": "down", "entity_id": DOMAIN + ".test"}
async def test_signal_repetitions(hass, monkeypatch):
    """Command should be sent amount of configured repetitions."""
    config = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            # platform-wide default of 3 repetitions, overridden to 2 on
            # the first device
            "device_defaults": {"signal_repetitions": 3},
            "devices": {
                "protocol_0_0": {"name": "test", "signal_repetitions": 2},
                "protocol_0_1": {"name": "test1"},
            },
        },
    }
    # setup mocking rflink module
    _, _, protocol, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
    # test if signal repetition is performed according to configuration
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test"}
        )
    )
    # wait for commands and repetitions to finish
    await hass.async_block_till_done()
    # per-device setting: 2 sends
    assert protocol.send_command_ack.call_count == 2
    # test if default apply to configured devices
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test1"}
        )
    )
    # wait for commands and repetitions to finish
    await hass.async_block_till_done()
    # default of 3 more sends on top of the previous 2
    assert protocol.send_command_ack.call_count == 5
async def test_signal_repetitions_alternation(hass, monkeypatch):
    """Simultaneously switching entities must alternate repetitions."""
    config = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {
                "protocol_0_0": {"name": "test", "signal_repetitions": 2},
                "protocol_0_1": {"name": "test1", "signal_repetitions": 2},
            },
        },
    }
    # setup mocking rflink module
    _, _, protocol, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test"}
        )
    )
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test1"}
        )
    )
    await hass.async_block_till_done()
    # repetitions for the two devices must be interleaved, not batched
    assert protocol.send_command_ack.call_args_list[0][0][0] == "protocol_0_0"
    assert protocol.send_command_ack.call_args_list[1][0][0] == "protocol_0_1"
    assert protocol.send_command_ack.call_args_list[2][0][0] == "protocol_0_0"
    assert protocol.send_command_ack.call_args_list[3][0][0] == "protocol_0_1"
async def test_signal_repetitions_cancelling(hass, monkeypatch):
    """Cancel outstanding repetitions when state changed."""
    config = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {"protocol_0_0": {"name": "test", "signal_repetitions": 3}},
        },
    }
    # setup mocking rflink module
    _, _, protocol, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test"}
        )
    )
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: DOMAIN + ".test"}
        )
    )
    await hass.async_block_till_done()
    # the OPEN call supersedes CLOSE: only the first DOWN goes out before
    # the remaining repetitions are replaced by UP commands
    assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN"
    assert protocol.send_command_ack.call_args_list[1][0][1] == "UP"
    assert protocol.send_command_ack.call_args_list[2][0][1] == "UP"
    assert protocol.send_command_ack.call_args_list[3][0][1] == "UP"
async def test_group_alias(hass, monkeypatch):
    """Group aliases should only respond to group commands (allon/alloff)."""
    cfg = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {
                "protocol_0_0": {"name": "test", "group_aliases": ["test_group_0_0"]}
            },
        },
    }
    # Set up the mocked rflink module for this configuration.
    fire_rflink_event, _, _, _ = await mock_rflink(hass, cfg, DOMAIN, monkeypatch)

    entity_id = DOMAIN + ".test"
    assert hass.states.get(entity_id).state == STATE_CLOSED

    # A group command addressed to the group alias must open the cover.
    fire_rflink_event({"id": "test_group_0_0", "command": "allon"})
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OPEN

    # A non-group command addressed to the group alias must be ignored,
    # so the cover stays open.
    fire_rflink_event({"id": "test_group_0_0", "command": "down"})
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OPEN
async def test_nogroup_alias(hass, monkeypatch):
    """Non group aliases should not respond to group commands."""
    cfg = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {
                "protocol_0_0": {
                    "name": "test",
                    "nogroup_aliases": ["test_nogroup_0_0"],
                }
            },
        },
    }
    # Set up the mocked rflink module for this configuration.
    fire_rflink_event, _, _, _ = await mock_rflink(hass, cfg, DOMAIN, monkeypatch)

    entity_id = DOMAIN + ".test"
    assert hass.states.get(entity_id).state == STATE_CLOSED

    # A group command addressed to the no-group alias must be ignored.
    fire_rflink_event({"id": "test_nogroup_0_0", "command": "allon"})
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_CLOSED

    # A direct command addressed to the no-group alias must be applied.
    fire_rflink_event({"id": "test_nogroup_0_0", "command": "up"})
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OPEN
async def test_nogroup_device_id(hass, monkeypatch):
    """Device id that do not respond to group commands (allon/alloff)."""
    cfg = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {"test_nogroup_0_0": {"name": "test", "group": False}},
        },
    }
    # Set up the mocked rflink module for this configuration.
    fire_rflink_event, _, _, _ = await mock_rflink(hass, cfg, DOMAIN, monkeypatch)

    entity_id = DOMAIN + ".test"
    assert hass.states.get(entity_id).state == STATE_CLOSED

    # Group commands must not affect a device configured with group: False.
    fire_rflink_event({"id": "test_nogroup_0_0", "command": "allon"})
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_CLOSED

    # Direct commands must still be applied.
    fire_rflink_event({"id": "test_nogroup_0_0", "command": "up"})
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OPEN
async def test_restore_state(hass, monkeypatch):
    """Ensure states are restored on startup."""
    config = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {
                "RTS_12345678_0": {"name": "c1"},
                "test_restore_2": {"name": "c2"},
                "test_restore_3": {"name": "c3"},
                "test_restore_4": {"name": "c4"},
            },
        },
    }
    # pre-populate the restore cache for c1/c2 only
    mock_restore_cache(
        hass, (State(DOMAIN + ".c1", STATE_OPEN), State(DOMAIN + ".c2", STATE_CLOSED))
    )
    hass.state = CoreState.starting
    # setup mocking rflink module
    _, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
    state = hass.states.get(DOMAIN + ".c1")
    assert state
    assert state.state == STATE_OPEN
    state = hass.states.get(DOMAIN + ".c2")
    assert state
    assert state.state == STATE_CLOSED
    # c3 has no cached state and defaults to closed
    state = hass.states.get(DOMAIN + ".c3")
    assert state
    assert state.state == STATE_CLOSED
    # not cached cover must default values
    state = hass.states.get(DOMAIN + ".c4")
    assert state
    assert state.state == STATE_CLOSED
    assert state.attributes["assumed_state"]
| fbradyirl/home-assistant | tests/components/rflink/test_cover.py | Python | apache-2.0 | 12,737 |
"""
Write a function to delete a node (except the tail) in a singly linked list, given only access to that node.
Supposed the linked list is 1 -> 2 -> 3 -> 4 and you are given the third node with value 3, the linked list should become 1 -> 2 -> 4 after calling your function.
"""
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    """LeetCode 237: delete a (non-tail) node from a singly linked list when
    only that node — not the list head — is available."""

    def deleteNode(self, node):
        """Remove *node* from its list in O(1).

        Since the predecessor is unreachable, the node cannot be unlinked
        directly.  Instead, overwrite it with its successor's value and
        splice the successor out, which is observationally equivalent.
        """
        successor = node.next
        node.val, node.next = successor.val, successor.next
| Ahmed--Mohsen/leetcode | delete_node_in_a_linked_list.py | Python | mit | 767 |
# Structured version components; rendered into a string by get_version().
__version_info__ = {
    'major': 0,
    'minor': 1,
    'micro': 0,
    'releaselevel': 'alpha',
    'serial': 1
}


def get_version(short=False):
    """Render ``__version_info__`` as a version string.

    Produces ``major.minor``, appends ``.micro`` when micro is non-zero,
    and — unless *short* is true — appends a pre-release tag such as
    ``a1``/``b1`` for non-final release levels.
    """
    assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
    version = "%(major)i.%(minor)i" % __version_info__
    if __version_info__['micro']:
        version += ".%(micro)i" % __version_info__
    if __version_info__['releaselevel'] != 'final' and not short:
        # e.g. 'alpha' serial 1 -> 'a1'
        version += '%s%i' % (__version_info__['releaselevel'][0],
                             __version_info__['serial'])
    return version


__version__ = get_version()
| bruth/resources | resources/__init__.py | Python | bsd-2-clause | 580 |
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory."""
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(os.path.join(os.path.dirname(__file__), fname)) as handle:
        return handle.read()
# Package metadata; the distributed artifact is the single module
# cubehelix.py (py_modules), there is no package directory.
setup(
    name="cubehelix",
    version="0.1.0",
    author="James Davenport",
    # author_email="",
    description="Cubehelix colormaps for matplotlib",
    # README.md is reused verbatim as the long description
    long_description=read('README.md'),
    # license="BSD",
    py_modules=['cubehelix'],
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Scientific/Engineering :: Visualization",
        # "License :: OSI Approved :: BSD License",
    ]
)
| jradavenport/cubehelix | setup.py | Python | bsd-2-clause | 605 |
import pytest
@pytest.fixture
def biosample(submitter, lab, award, source, organism):
    # Minimal valid human tissue biosample payload; other fixtures below
    # copy and extend this dict.
    return {
        'award': award['uuid'],
        'biosample_term_id': 'UBERON:349829',
        'biosample_term_name': 'heart',
        'biosample_type': 'tissue',
        'lab': lab['uuid'],
        'organism': organism['uuid'],
        'source': source['uuid'],
    }
@pytest.fixture
def biosample_depleted_in(mouse_biosample):
    # Whole-organism mouse biosample with a depleted_in annotation; used to
    # exercise the depleted_in_* schema dependencies.
    item = mouse_biosample.copy()
    item.update({
        'depleted_in_term_name': ['head'],
        'biosample_term_name': 'multicellular organism',
        "biosample_type": "whole organisms"
    })
    return item
@pytest.fixture
def biosample_starting_amount(biosample):
    # starting_amount without starting_amount_units: deliberately invalid
    # until a test adds the units (see test_biosample_starting_amount_dep).
    item = biosample.copy()
    item.update({
        'starting_amount': 20
    })
    return item
@pytest.fixture
def mouse_biosample(biosample, mouse):
    # Human biosample payload re-targeted at mouse, with the model-organism
    # properties that are only valid for non-human organisms.
    item = biosample.copy()
    item.update({
        'organism': mouse['uuid'],
        'model_organism_age': '8',
        'model_organism_age_units': 'day',
        'model_organism_sex': 'female',
        'model_organism_health_status': 'apparently healthy',
        'model_organism_mating_status': 'virgin'
    })
    return item
def test_biosample_depleted_in(testapp, biosample_depleted_in):
    """A whole-organisms biosample with depleted_in_term_name posts OK."""
    testapp.post_json('/biosample', biosample_depleted_in)
def test_biosample_depleted_in_name_required(testapp, biosample_depleted_in):
    """depleted_in_term_id without a matching term_name entry is rejected."""
    biosample_depleted_in.update({'depleted_in_term_id': ['UBERON:0000033']})
    testapp.post_json('/biosample', biosample_depleted_in, status=422)
def test_biosample_depleted_in_type_whole_organism(testapp, biosample_depleted_in):
    """depleted_in requires type 'whole organisms'; the singular form fails."""
    biosample_depleted_in['biosample_type'] = 'whole organism'
    testapp.post_json('/biosample', biosample_depleted_in, status=422)
def test_biosample_starting_amount_fail(testapp, biosample_starting_amount):
    """starting_amount without starting_amount_units is rejected."""
    testapp.post_json('/biosample', biosample_starting_amount, status=422)
def test_biosample_starting_amount_dep(testapp, biosample_starting_amount):
    """starting_amount together with units satisfies the dependency."""
    biosample_starting_amount['starting_amount'] = 40
    biosample_starting_amount['starting_amount_units'] = 'cells'
    testapp.post_json('/biosample', biosample_starting_amount)
def test_biosample_mouse_life_stage(testapp, mouse_biosample):
    """mouse_life_stage is accepted on a mouse biosample."""
    mouse_biosample['mouse_life_stage'] = 'adult'
    testapp.post_json('/biosample', mouse_biosample)
def test_biosample_mouse_life_stage_fail(testapp, biosample):
    """mouse_life_stage on a human biosample is rejected."""
    biosample['mouse_life_stage'] = 'adult'
    testapp.post_json('/biosample', biosample, status=422)
def test_biosample_model_organism_props_on_human_fail(testapp, mouse_biosample, human):
    """model_organism_* properties are invalid once organism is human."""
    mouse_biosample['organism'] = human['uuid']
    testapp.post_json('/biosample', mouse_biosample, status=422)
def test_biosample_human_post_synchronization_fail(testapp, biosample):
    """post_synchronization_* is not allowed on human biosamples."""
    biosample['post_synchronization_time'] = '10'
    biosample['post_synchronization_time_units'] = 'hour'
    testapp.post_json('/biosample', biosample, status=422)
def test_biosample_mouse_post_synchronization_fail(testapp, mouse_biosample):
    """post_synchronization_* is not allowed on mouse biosamples either."""
    mouse_biosample['post_synchronization_time'] = '10'
    mouse_biosample['post_synchronization_time_units'] = 'hour'
    testapp.post_json('/biosample', mouse_biosample, status=422)
def test_biosample_mating_status_no_sex_fail(testapp, mouse_biosample):
    """model_organism_mating_status requires model_organism_sex."""
    del mouse_biosample['model_organism_sex']
    mouse_biosample['model_organism_mating_status'] = 'mated'
    testapp.post_json('/biosample', mouse_biosample, status=422)
# NOTE(review): 'biosmple' in the next test name is a typo for 'biosample';
# left unchanged here to avoid altering the public test identifier.
def test_biosmple_post_synchronization_no_unit_fail(testapp, mouse_biosample, fly):
    """post_synchronization_time without its units is rejected (fly)."""
    mouse_biosample['organism'] = fly['uuid']
    mouse_biosample['post_synchronization_time'] = '30'
    testapp.post_json('/biosample', mouse_biosample, status=422)
def test_biosample_human_whole_organism_fail(testapp, biosample):
    """Human biosamples may not use the 'whole organisms' type."""
    biosample['biosample_type'] = 'whole organisms'
    testapp.post_json('/biosample', biosample, status=422)
def test_alt_accession_ENCBS_regex(testapp, biosample):
    """alternate_accessions must match the biosample ENCBS pattern,
    not the file (ENCFF) pattern."""
    bio = testapp.post_json('/biosample', biosample).json['@graph'][0]
    # file-style accession must be rejected
    res = testapp.patch_json(
        bio['@id'],
        {'status': 'replaced', 'alternate_accessions': ['ENCFF123ABC']}, expect_errors=True)
    assert res.status_code == 422
    # biosample-style accession must be accepted
    res = testapp.patch_json(
        bio['@id'],
        {'status': 'replaced', 'alternate_accessions': ['ENCBS123ABC']})
    assert res.status_code == 200
| T2DREAM/t2dream-portal | src/encoded/tests/test_schema_biosample.py | Python | mit | 4,368 |
class Solution:
    """LeetCode 152: maximum product of a contiguous subarray."""

    def maxProduct(self, nums: List[int]) -> int:
        """Return the largest product over all non-empty contiguous subarrays.

        Keeps a running minimum and maximum product ending at each index:
        a negative element can turn the smallest product so far into the
        largest, so both extremes must be tracked.
        """
        best = running_min = running_max = nums[0]
        for value in nums[1:]:
            # The subarray ending here either starts fresh at `value` or
            # extends the previous min/max product.
            candidates = (value, value * running_min, value * running_max)
            running_min = min(candidates)
            running_max = max(candidates)
            best = max(best, running_max)
        return best
| saisankargochhayat/algo_quest | leetcode/152.MaxProductSubarray/soln.py | Python | apache-2.0 | 734 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Jérémie DECOCK (http://www.jdhp.org)
# run:
# mpirun -np 4 python3 broadcast_pickle.py
# or
# mpiexec -n 4 python3 broadcast_pickle.py
from mpi4py import MPI
# Broadcast a picklable Python object from rank 0 to every rank.
comm = MPI.COMM_WORLD
rank = comm.rank
if rank == 0:
    # Only the root rank prepares the payload; bcast pickles it.
    data = {'key1': [1, 2.5, 3], 'key2': ('hello', 'world')}
else:
    data = None
# After bcast every rank holds its own copy of the root's data.
data = comm.bcast(data, root=0)
print("process", rank, ":", data)
| jeremiedecock/snippets | python/mpi4py/broadcast_pickle.py | Python | mit | 444 |
# coding=utf-8
from __future__ import absolute_import
# Probe for the optional 'argh' dependency; the decorators defined below
# are only created when it is importable (see the argh_installed guard).
try:
    # noinspection PyUnresolvedReferences
    from argh.decorators import arg
    argh_installed = True
except ImportError:
    argh_installed = False
__author__ = 'Tyler Butler <tyler@tylerbutler.com>'
# Everything below only exists when argh is installed; importers must
# guard their own use of these names accordingly.
if argh_installed:
    # noinspection PyUnresolvedReferences
    from argh.helpers import set_default_command
    # Functions that opt out of the config-file requirement (see the
    # no_config_file decorator and does_not_require_config_file check).
    _no_config_file_registry = []
    # Reusable argh argument decorators shared by the command modules.
    config_file = arg('-s', '--config', '--settings',
                      dest='config_file',
                      default='config.yaml',
                      help="Specify a configuration file to use.")
    verbose = arg('-v', '--verbose',
                  dest='verbose',
                  action='count',
                  default=0,
                  help="Display verbose output.")
    def no_config_file(*args):
        # Decorator factory: must be applied as @no_config_file(); the
        # *args are accepted but intentionally unused.
        def wrapper(func):
            _no_config_file_registry.append(func)
            return func
        return wrapper
    def does_not_require_config_file(func):
        # True when func was registered via @no_config_file().
        return func in _no_config_file_registry
| tylerbutler/engineer | engineer/commands/argh_helpers.py | Python | mit | 1,055 |
import xyz
class Mpfr(xyz.Package):
    # Build rule for the MPFR library; gmp is a build dependency and
    # texinfo is needed to build its documentation.
    pkg_name = 'mpfr'
    deps = ['texinfo', 'gmp']
    # Reuse the stock host-library configure step from the base Package.
    configure = xyz.Package.host_lib_configure
# The xyz driver looks up this module-level name to find the rule class.
rules = Mpfr
| BreakawayConsulting/xyz | rules/mpfr.py | Python | mit | 150 |
"""The tests for the Tasmota sensor platform."""
import copy
import datetime
from datetime import timedelta
import json
from unittest.mock import Mock, patch
import hatasmota
from hatasmota.utils import (
get_topic_stat_status,
get_topic_tele_sensor,
get_topic_tele_will,
)
import pytest
from homeassistant import config_entries
from homeassistant.components import sensor
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_UNKNOWN
from homeassistant.helpers import entity_registry as er
from homeassistant.util import dt
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.common import async_fire_mqtt_message, async_fire_time_changed
# Discovery payload for a single DHT11 temperature sensor. The "sn" key
# mirrors Tasmota's SENSOR telemetry structure; None values mean only the
# key layout is advertised, not an actual reading.
DEFAULT_SENSOR_CONFIG = {
    "sn": {
        "Time": "2020-09-25T12:47:15",
        "DHT11": {"Temperature": None},
        "TempUnit": "C",
    }
}
# Discovery payload advertising a 3-element indexed ApparentPower list;
# used to exercise updates whose index count does not match this layout.
BAD_INDEXED_SENSOR_CONFIG_3 = {
    "sn": {
        "Time": "2020-09-25T12:47:15",
        "ENERGY": {
            "ApparentPower": [7.84, 1.23, 2.34],
        },
    }
}
# Full energy-sensor payload with a scalar "Total" plus an indexed (list)
# "TotalTariff"; "TotalStartTime" feeds the sensor's last-reset handling.
INDEXED_SENSOR_CONFIG = {
    "sn": {
        "Time": "2020-09-25T12:47:15",
        "ENERGY": {
            "TotalStartTime": "2018-11-23T15:33:47",
            "Total": 0.017,
            "TotalTariff": [0.000, 0.017],
            "Yesterday": 0.000,
            "Today": 0.002,
            "ExportActive": 0.000,
            "ExportTariff": [0.000, 0.000],
            "Period": 0.00,
            "Power": 0.00,
            "ApparentPower": 7.84,
            "ReactivePower": -7.21,
            "Factor": 0.39,
            "Frequency": 50.0,
            "Voltage": 234.31,
            "Current": 0.039,
            "ImportActive": 12.580,
            "ImportReactive": 0.002,
            "ExportReactive": 39.131,
            "PhaseAngle": 290.45,
        },
    }
}
# Same as INDEXED_SENSOR_CONFIG, but with an indexed (list) "Total" so the
# per-index last-reset behavior can be tested.
INDEXED_SENSOR_CONFIG_2 = {
    "sn": {
        "Time": "2020-09-25T12:47:15",
        "ENERGY": {
            "TotalStartTime": "2018-11-23T15:33:47",
            "Total": [0.000, 0.017],
            "TotalTariff": [0.000, 0.017],
            "Yesterday": 0.000,
            "Today": 0.002,
            "ExportActive": 0.000,
            "ExportTariff": [0.000, 0.000],
            "Period": 0.00,
            "Power": 0.00,
            "ApparentPower": 7.84,
            "ReactivePower": -7.21,
            "Factor": 0.39,
            "Frequency": 50.0,
            "Voltage": 234.31,
            "Current": 0.039,
            "ImportActive": 12.580,
            "ImportReactive": 0.002,
            "ExportReactive": 39.131,
            "PhaseAngle": 290.45,
        },
    }
}
# TX23 anemometer payload with nested Speed/Dir sub-dicts, exercising
# sensors more than one level deep in the telemetry tree.
NESTED_SENSOR_CONFIG = {
    "sn": {
        "Time": "2020-03-03T00:00:00+00:00",
        "TX23": {
            "Speed": {"Act": 14.8, "Avg": 8.5, "Min": 12.2, "Max": 14.8},
            "Dir": {
                "Card": "WSW",
                "Deg": 247.5,
                "Avg": 266.1,
                "AvgCard": "W",
                "Range": 0,
            },
        },
        "SpeedUnit": "km/h",
    }
}
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT.

    Discovers a DHT11 temperature sensor, then walks the state machine:
    unavailable -> unknown (after LWT "Online") -> value from a periodic
    tele/SENSOR update -> value from a polled stat/STATUS10 response.
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    mac = config["mac"]
    # Simulate Tasmota discovery: device config, then sensor layout.
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    # Before the device reports online, the entity is unavailable.
    state = hass.states.get("sensor.tasmota_dht11_temperature")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Discovered entity is enabled by default with no entity category.
    entity_reg = er.async_get(hass)
    entry = entity_reg.async_get("sensor.tasmota_dht11_temperature")
    assert entry.disabled is False
    assert entry.disabled_by is None
    assert entry.entity_category is None
    # LWT "Online": entity becomes available but has no value yet.
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_dht11_temperature")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test periodic state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/tele/SENSOR", '{"DHT11":{"Temperature":20.5}}'
    )
    state = hass.states.get("sensor.tasmota_dht11_temperature")
    assert state.state == "20.5"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"DHT11":{"Temperature":20.0}}}',
    )
    state = hass.states.get("sensor.tasmota_dht11_temperature")
    assert state.state == "20.0"
async def test_nested_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT.

    Same unavailable -> unknown -> periodic -> polled sequence as the
    basic test, but for a sensor nested two levels deep (TX23 Speed/Act).
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_tx23_speed_act")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_tx23_speed_act")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test periodic state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/tele/SENSOR", '{"TX23":{"Speed":{"Act":"12.3"}}}'
    )
    state = hass.states.get("sensor.tasmota_tx23_speed_act")
    assert state.state == "12.3"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"TX23":{"Speed":{"Act":"23.4"}}}}',
    )
    state = hass.states.get("sensor.tasmota_tx23_speed_act")
    assert state.state == "23.4"
async def test_indexed_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT.

    Verifies that a list-valued quantity (ENERGY TotalTariff) is exposed
    as per-index entities; the `_1` entity tracks list element [1].
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test periodic state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"TotalTariff":[1.2,3.4]}}'
    )
    state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
    assert state.state == "3.4"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"ENERGY":{"TotalTariff":[5.6,7.8]}}}',
    )
    state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
    assert state.state == "7.8"
async def test_indexed_sensor_state_via_mqtt2(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT for sensor with last_reset property.

    The scalar ENERGY Total sensor must report the total_increasing state
    class and track both periodic and polled updates.
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_energy_total")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Energy totals must use the total_increasing state class.
    assert (
        state.attributes[sensor.ATTR_STATE_CLASS] == sensor.STATE_CLASS_TOTAL_INCREASING
    )
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_energy_total")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test periodic state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/tele/SENSOR",
        '{"ENERGY":{"Total":1.2,"TotalStartTime":"2018-11-23T15:33:47"}}',
    )
    state = hass.states.get("sensor.tasmota_energy_total")
    assert state.state == "1.2"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"ENERGY":{"Total":5.6,"TotalStartTime":"2018-11-23T16:33:47"}}}',
    )
    state = hass.states.get("sensor.tasmota_energy_total")
    assert state.state == "5.6"
async def test_indexed_sensor_state_via_mqtt3(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT for indexed sensor with last_reset property.

    Like test_indexed_sensor_state_via_mqtt2, but ENERGY Total is a list,
    so the per-index `_1` entity is checked (list element [1]).
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG_2)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_energy_total_1")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    assert (
        state.attributes[sensor.ATTR_STATE_CLASS] == sensor.STATE_CLASS_TOTAL_INCREASING
    )
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_energy_total_1")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test periodic state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/tele/SENSOR",
        '{"ENERGY":{"Total":[1.2, 3.4],"TotalStartTime":"2018-11-23T15:33:47"}}',
    )
    state = hass.states.get("sensor.tasmota_energy_total_1")
    assert state.state == "3.4"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"ENERGY":{"Total":[5.6,7.8],"TotalStartTime":"2018-11-23T16:33:47"}}}',
    )
    state = hass.states.get("sensor.tasmota_energy_total_1")
    assert state.state == "7.8"
async def test_bad_indexed_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT where sensor is not matching configuration.

    The device advertised a 3-element ApparentPower list but then reports
    fewer values (or a scalar). Entities for the provided indices must
    update while the remaining entities keep their previous state.
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(BAD_INDEXED_SENSOR_CONFIG_3)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    # All three indexed entities start out unavailable.
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test periodic state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"ApparentPower":[1.2,3.4,5.6]}}'
    )
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == "1.2"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == "3.4"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == "5.6"
    # Test periodic state update with too few values
    # Index 2 is missing from the payload, so entity _2 keeps "5.6".
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"ApparentPower":[7.8,9.0]}}'
    )
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == "7.8"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == "9.0"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == "5.6"
    # A scalar payload only updates index 0; _1 and _2 are unchanged.
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"ApparentPower":2.3}}'
    )
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == "2.3"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == "9.0"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == "5.6"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"ENERGY":{"ApparentPower":[1.2,3.4,5.6]}}}',
    )
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == "1.2"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == "3.4"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == "5.6"
    # Test polled state update with too few values
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"ENERGY":{"ApparentPower":[7.8,9.0]}}}',
    )
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == "7.8"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == "9.0"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == "5.6"
    # Polled scalar payload: again only index 0 updates.
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS10",
        '{"StatusSNS":{"ENERGY":{"ApparentPower":2.3}}}',
    )
    state = hass.states.get("sensor.tasmota_energy_apparentpower_0")
    assert state.state == "2.3"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_1")
    assert state.state == "9.0"
    state = hass.states.get("sensor.tasmota_energy_apparentpower_2")
    assert state.state == "5.6"
@pytest.mark.parametrize("status_sensor_disabled", [False])
async def test_status_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT.

    Pre-enables the (normally disabled) WiFi signal status sensor, then
    checks pushed (tele/STATE) and polled (stat/STATUS11) updates, and
    that the entity uses force_update.
    """
    entity_reg = er.async_get(hass)
    # Pre-enable the status sensor
    entity_reg.async_get_or_create(
        sensor.DOMAIN,
        "tasmota",
        "00000049A3BC_status_sensor_status_sensor_status_signal",
        suggested_object_id="tasmota_status",
        disabled_by=None,
    )
    config = copy.deepcopy(DEFAULT_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test pushed state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/tele/STATE", '{"Wifi":{"Signal":20.5}}'
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "20.5"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS11",
        '{"StatusSTS":{"Wifi":{"Signal":20.0}}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "20.0"
    # Test force update flag
    entity = hass.data["entity_components"]["sensor"].get_entity(
        "sensor.tasmota_status"
    )
    assert entity.force_update
@pytest.mark.parametrize("status_sensor_disabled", [False])
async def test_single_shot_status_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT.

    The restart-reason status sensor is "single shot": it accepts one
    polled value per online session and ignores further updates until the
    device signals Online again via LWT.
    """
    entity_reg = er.async_get(hass)
    # Pre-enable the status sensor
    entity_reg.async_get_or_create(
        sensor.DOMAIN,
        "tasmota",
        "00000049A3BC_status_sensor_status_sensor_status_restart_reason",
        suggested_object_id="tasmota_status",
        disabled_by=None,
    )
    config = copy.deepcopy(DEFAULT_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Some reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Some reason"
    # Test polled state update is ignored
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Another reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Some reason"
    # Device signals online again
    # Re-arming: the next polled value will be accepted again.
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Some reason"
    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Another reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Another reason"
    # Test polled state update is ignored
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Third reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Another reason"
@pytest.mark.parametrize("status_sensor_disabled", [False])
@patch.object(hatasmota.status_sensor, "datetime", Mock(wraps=datetime.datetime))
async def test_restart_time_status_sensor_state_via_mqtt(
    hass, mqtt_mock, setup_tasmota
):
    """Test state update via MQTT.

    The last-restart-time sensor derives its value as now() - UptimeSec;
    hatasmota's datetime is mocked so now() is deterministic.
    """
    entity_reg = er.async_get(hass)
    # Pre-enable the status sensor
    entity_reg.async_get_or_create(
        sensor.DOMAIN,
        "tasmota",
        "00000049A3BC_status_sensor_status_sensor_last_restart_time",
        suggested_object_id="tasmota_status",
        disabled_by=None,
    )
    config = copy.deepcopy(DEFAULT_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Test polled state update
    # 08:00 UTC minus 3600 s uptime -> restart at 07:00 UTC.
    utc_now = datetime.datetime(2020, 11, 11, 8, 0, 0, tzinfo=dt.UTC)
    hatasmota.status_sensor.datetime.now.return_value = utc_now
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS11",
        '{"StatusSTS":{"UptimeSec":"3600"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "2020-11-11T07:00:00+00:00"
async def test_attributes(hass, mqtt_mock, setup_tasmota):
    """Test correct attributes for sensors.

    Checks device_class, friendly_name, icon and unit for a known
    quantity (temperature) and a mixed-case one (CarbonDioxide).
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = {
        "sn": {
            "DHT11": {"Temperature": None},
            "Beer": {"CarbonDioxide": None},
            "TempUnit": "C",
        }
    }
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_dht11_temperature")
    assert state.attributes.get("device_class") == "temperature"
    assert state.attributes.get("friendly_name") == "Tasmota DHT11 Temperature"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "°C"
    state = hass.states.get("sensor.tasmota_beer_CarbonDioxide")
    assert state.attributes.get("device_class") == "carbon_dioxide"
    assert state.attributes.get("friendly_name") == "Tasmota Beer CarbonDioxide"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "ppm"
async def test_nested_sensor_attributes(hass, mqtt_mock, setup_tasmota):
    """Test correct attributes for sensors.

    Nested TX23 sensors: Speed entities get the advertised SpeedUnit;
    Dir/Avg has no unit (blank " " placeholder) and no device class.
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_tx23_speed_act")
    assert state.attributes.get("device_class") is None
    assert state.attributes.get("friendly_name") == "Tasmota TX23 Speed Act"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "km/h"
    state = hass.states.get("sensor.tasmota_tx23_dir_avg")
    assert state.attributes.get("device_class") is None
    assert state.attributes.get("friendly_name") == "Tasmota TX23 Dir Avg"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == " "
async def test_indexed_sensor_attributes(hass, mqtt_mock, setup_tasmota):
    """Test correct attributes for sensors.

    Indexed (list-valued) quantities get one entity per index, with the
    index appended to both the entity_id and the friendly name.
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = {
        "sn": {
            "Dummy1": {"Temperature": [None, None]},
            "Dummy2": {"CarbonDioxide": [None, None]},
            "TempUnit": "C",
        }
    }
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_dummy1_temperature_0")
    assert state.attributes.get("device_class") == "temperature"
    assert state.attributes.get("friendly_name") == "Tasmota Dummy1 Temperature 0"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "°C"
    state = hass.states.get("sensor.tasmota_dummy2_carbondioxide_1")
    assert state.attributes.get("device_class") == "carbon_dioxide"
    assert state.attributes.get("friendly_name") == "Tasmota Dummy2 CarbonDioxide 1"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "ppm"
@pytest.mark.parametrize("status_sensor_disabled", [False])
@pytest.mark.parametrize(
    "sensor_name, disabled, disabled_by",
    [
        ("tasmota_firmware_version", True, er.DISABLED_INTEGRATION),
        ("tasmota_ip", True, er.DISABLED_INTEGRATION),
        ("tasmota_last_restart_time", False, None),
        ("tasmota_mqtt_connect_count", False, None),
        ("tasmota_rssi", True, er.DISABLED_INTEGRATION),
        ("tasmota_signal", True, er.DISABLED_INTEGRATION),
        ("tasmota_ssid", False, None),
        ("tasmota_wifi_connect_count", False, None),
    ],
)
async def test_diagnostic_sensors(
    hass, mqtt_mock, setup_tasmota, sensor_name, disabled, disabled_by
):
    """Test properties of diagnostic sensors.

    Each status sensor is parametrized with whether the integration
    creates it disabled; all of them must carry the diagnostic category.
    """
    entity_reg = er.async_get(hass)
    config = copy.deepcopy(DEFAULT_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # A disabled entity has a registry entry but no state object.
    state = hass.states.get(f"sensor.{sensor_name}")
    assert bool(state) != disabled
    entry = entity_reg.async_get(f"sensor.{sensor_name}")
    assert entry.disabled == disabled
    assert entry.disabled_by == disabled_by
    assert entry.entity_category == "diagnostic"
@pytest.mark.parametrize("status_sensor_disabled", [False])
async def test_enable_status_sensor(hass, mqtt_mock, setup_tasmota):
    """Test enabling status sensor.

    Enabling a disabled registry entry triggers a delayed config-entry
    reload; after the reload and a re-sent (retained) discovery message
    the entity appears and follows LWT availability.
    """
    entity_reg = er.async_get(hass)
    config = copy.deepcopy(DEFAULT_CONFIG)
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_signal")
    assert state is None
    entry = entity_reg.async_get("sensor.tasmota_signal")
    assert entry.disabled
    assert entry.disabled_by == er.DISABLED_INTEGRATION
    # Enable the signal level status sensor
    updated_entry = entity_reg.async_update_entity(
        "sensor.tasmota_signal", disabled_by=None
    )
    assert updated_entry != entry
    assert updated_entry.disabled is False
    await hass.async_block_till_done()
    # Advance time past the reload delay so the config entry reloads.
    async_fire_time_changed(
        hass,
        dt.utcnow() + timedelta(seconds=config_entries.RELOAD_AFTER_UPDATE_DELAY + 1),
    )
    await hass.async_block_till_done()
    # Fake re-send of retained discovery message
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_signal")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_signal")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
async def test_availability_when_connection_lost(
    hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
    """Entities should become unavailable when the MQTT connection drops."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    await help_test_availability_when_connection_lost(
        hass, mqtt_client_mock, mqtt_mock, sensor.DOMAIN,
        device_config, sensors, "tasmota_dht11_temperature",
    )
async def test_availability(hass, mqtt_mock, setup_tasmota):
    """Sensor availability should track the device's MQTT LWT topic."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    await help_test_availability(
        hass, mqtt_mock, sensor.DOMAIN,
        device_config, sensors, "tasmota_dht11_temperature",
    )
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
    """Availability topics should follow a changed discovery message."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    await help_test_availability_discovery_update(
        hass, mqtt_mock, sensor.DOMAIN,
        device_config, sensors, "tasmota_dht11_temperature",
    )
async def test_availability_poll_state(
    hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
    """The integration should poll STATUS 10 after MQTT (re)connects."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    # Sensor state is refreshed by sending "10" to the STATUS command topic.
    command_topic = "tasmota_49A3BC/cmnd/STATUS"
    await help_test_availability_poll_state(
        hass, mqtt_client_mock, mqtt_mock, sensor.DOMAIN,
        device_config, command_topic, "10", sensors,
    )
async def test_discovery_removal_sensor(hass, mqtt_mock, caplog, setup_tasmota):
    """A sensor should be removed when rediscovered with an empty sensor config."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    # Second discovery keeps the device config but drops all sensors ({}).
    await help_test_discovery_removal(
        hass, mqtt_mock, caplog, sensor.DOMAIN,
        device_config, device_config, sensors, {},
        "tasmota_dht11_temperature", "Tasmota DHT11 Temperature",
    )
async def test_discovery_update_unchanged_sensor(
    hass, mqtt_mock, caplog, setup_tasmota
):
    """Re-sending an identical discovery message should not trigger an update."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    patcher = patch(
        "homeassistant.components.tasmota.sensor.TasmotaSensor.discovery_update"
    )
    with patcher as discovery_update:
        await help_test_discovery_update_unchanged(
            hass, mqtt_mock, caplog, sensor.DOMAIN,
            device_config, discovery_update, sensors,
            "tasmota_dht11_temperature", "Tasmota DHT11 Temperature",
        )
async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota):
    """Removing the device via discovery should remove it from the registry."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    unique_id = f"{DEFAULT_CONFIG['mac']}_sensor_sensor_DHT11_Temperature"
    await help_test_discovery_device_remove(
        hass, mqtt_mock, sensor.DOMAIN, unique_id, device_config, sensors
    )
async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota):
    """Renaming the entity_id should keep the same MQTT subscriptions."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    # Topics the sensor must stay subscribed to across the rename:
    # telemetry, polled status responses, and the device's LWT.
    subscribed_topics = [
        get_topic_tele_sensor(device_config),
        get_topic_stat_status(device_config, 10),
        get_topic_tele_will(device_config),
    ]
    await help_test_entity_id_update_subscriptions(
        hass, mqtt_mock, sensor.DOMAIN,
        device_config, subscribed_topics, sensors,
        "tasmota_dht11_temperature",
    )
async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota):
    """Discovery updates should still be applied after an entity_id rename."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    sensors = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
    await help_test_entity_id_update_discovery_update(
        hass, mqtt_mock, sensor.DOMAIN,
        device_config, sensors, "tasmota_dht11_temperature",
    )
| aronsky/home-assistant | tests/components/tasmota/test_sensor.py | Python | apache-2.0 | 33,360 |
# Copyright 2021 The TF-Coder Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for filter_group.py."""
from absl.testing import absltest
from tf_coder import filter_group
class FilterGroupTest(absltest.TestCase):
  """Sanity checks for the FilterGroup enum."""

  def test_filter_group_enum_names(self):
    """Every FilterGroup member's value must equal its name."""
    mismatched = [group for group in filter_group.FilterGroup
                  if group.name != group.value]
    self.assertEqual([], mismatched)
# Run the tests when this file is executed directly.
if __name__ == '__main__':
  absltest.main()
| google-research/tensorflow-coder | tf_coder/filter_group_test.py | Python | apache-2.0 | 947 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.