id (stringlengths 2–8) | text (stringlengths 16–264k) | dataset_id (stringclasses, 1 value)
---|---|---|
11225673
|
#!/usr/bin/env python3
from __future__ import print_function
import errno
import sys
import logging
from pyocd.core.helpers import ConnectHelper
from pyocd.flash.file_programmer import FileProgrammer
from pyocd.flash.eraser import FlashEraser
from binho.utils import log_silent, log_verbose
from binho.errors import DeviceNotFoundError
from binho.utils import binhoArgumentParser
def main():
# Set up a simple argument parser.
parser = binhoArgumentParser(
description="utility for using supported Binho host adapters in DAPLink mode to flash code to MCUs"
)
parser.add_argument("-t", "--target", default=None, help="Manufacturer part number of target device")
parser.add_argument("-f", "--file", default=None, help="Path to binary file to program")
parser.add_argument(
"-e", "--erase", action="store_true", help="Perform chip-erase before programming",
)
parser.add_argument(
"-r", "--reset", action="store_true", help="Reset the device after programming completes",
)
args = parser.parse_args()
log_function = log_verbose if args.verbose else log_silent
log_function("Checking for pyOCD...")
try:
import pyocd # pylint: disable=import-outside-toplevel
except ModuleNotFoundError:
print("PyOCD must be installed for this to work. Use 'pip install pyocd' to install the module.")
sys.exit(1)
log_function("pyOCD installation confirmed!")
try:
log_function("Trying to find a Binho host adapter...")
device = parser.find_specified_device()
if device.inDAPLinkMode:
log_function(
"{} found on {} in DAPLink mode (Device ID: {})".format(
device.productName, device.commPort, device.deviceID
)
)
else:
log_function("{} found on {}. (Device ID: {})".format(device.productName, device.commPort, device.deviceID))
print("The {} is not in DAPLink mode. Please use the 'binho daplink' command ")
sys.exit(errno.ENODEV)
except DeviceNotFoundError:
if args.serial:
print(
"No Binho host adapter found matching Device ID '{}'.".format(args.serial), file=sys.stderr,
)
else:
print("No Binho host adapter found!", file=sys.stderr)
sys.exit(errno.ENODEV)
# if we fail before here, no connection to the device was opened yet.
# however, if we fail after this point, we need to make sure we don't
# leave the serial port open.
try:
if not args.file and not (args.erase or args.reset):
print("No binary file to program was supplied.")
sys.exit(1)
erase_setting = "auto"
target_override = "cortex_m"
if args.erase:
erase_setting = "chip"
if args.target:
target_override = args.target
if args.verbose:
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.WARNING)
with ConnectHelper.session_with_chosen_probe(
target_override=target_override, chip_erase=erase_setting, smart_flash="false",
) as session:
board = session.board
target = board.target
print("Vendor: {}\tPart Number: {}".format(target.vendor, target.part_number))
if args.erase:
eraser = FlashEraser(session, FlashEraser.Mode.CHIP)
eraser.erase()
print("{} erased".format(target.part_number))
if args.file:
FileProgrammer(session).program(args.file)
log_function("Target {} programmed with {}".format(target.part_number, args.file))
if args.reset:
target.reset()
print("Target {} reset".format(target.part_number))
except pyocd.core.exceptions.TransferError:
print(
"Problem communicating with the target MCU. Please make sure SWDIO, SWCLK, and GND are properly "
" connected and the MCU is powered up."
)
finally:
# close the connection to the host adapter
device.close()
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
35582
|
#!/usr/bin/python
import sys
import csv
def split_num(n, M):
# n : original number
# M : max size for split range
nsplit = n//M
if nsplit*M < n:
nsplit += 1
ninterval = n//nsplit
ncum = 1
end = 0
res = []
while end < n:
start = ncum
ncum += ninterval
end = ncum-1
if end > n:
end = n
res.append("{0}-{1}".format(start, end))
return res
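# Worked example (illustrative, not part of the original script): with n=10 and M=3,
# nsplit rounds up to 4 and ninterval = 10 // 4 = 2, so the loop emits five ranges:
# >>> split_num(10, 3)
# ['1-2', '3-4', '5-6', '7-8', '9-10']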
def split_num_given_args():
n = int(sys.argv[1]) # original number
M = int(sys.argv[2]) # max size for split range
print(split_num(n, M))
def split_chrom(chromsize_file, M):
with open(chromsize_file, 'r') as f:
reader = csv.reader(f, delimiter='\t')
for row in reader:
for interval in split_num(int(row[1]), int(M)):
print ("{chr}:{interval}".format(chr=row[0], interval=interval))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Arguments")
parser.add_argument("-c", "--chrom", help="Chrom.size file, tab-delimited")
parser.add_argument("-M", "--max_split_size", help="Maximum split size")
args = parser.parse_args()
split_chrom(args.chrom, args.max_split_size)
|
StarcoderdataPython
|
1971833
|
import re
from contextlib import suppress
from queue import Empty, PriorityQueue, Queue
from sys import maxsize
from threading import Thread
from time import time_ns
from .config import cfg
from .graphics import Image, ImageDraw, ImageOps, blur, encode
from .vocab_pick import Set
CANVAS_SZ = (cfg['image-max-width'], cfg['image-max-height'])
CC_CNV_GAP = 10
CC_BORDER = 10
P_PAUSE_GIF = re.compile(cfg['patt-gif-loop-pause']).search
PAUSE_GIF_DUR = cfg['gif-pause-duration'] * 1_000_000_000
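# Added note: im_resize below scales an image down to fit within (w, h) while preserving
# aspect ratio; the scale factor is capped at 1, so images are never upscaled.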
def im_resize(im: Image.Image, w: int, h: int) -> Image.Image:
imw, imh = im.size
wscale = min((1, w / imw))
hscale = min((1, h / imh))
scale = min((wscale, hscale))
sz = (int(imw * scale), int(imh * scale))
return im.resize(sz)
class Canvas:
def __init__(self, win):
self.__sgimgs = (win['-IM1-'], win['-IM2-'], win['-IM3-'])
self.__ccwidth = win.size[0] - CANVAS_SZ[0] + CC_BORDER
self.__ccmargin = (
win.size[1] - win['-CCARD-'].Widget.winfo_height()) // 2 - CC_BORDER
self.__set = []
self.__frames = None
self.__imq = PriorityQueue()
self.__nimgs = 0
self.__imid = 0
def __im_prepare(self, im: Image.Image):
'''
Prepares given frame for presentation as the window background image.
Because background images aren't actually supported, the image is split into tiles instead.
'''
cw, ch = CANVAS_SZ
ww, wh = cw + self.__ccwidth + CC_CNV_GAP, ch
bg = blur(ImageOps.fit(im, (ww, wh), centering=(0, 0.5)), False)
# Paste canvas (clear image)
cnv = im_resize(im, cw, ch)
cx = int((cw - cnv.size[0]) / 2)
cy = int((ch - cnv.size[1]) / 2)
bg.paste(cnv, (cx, cy))
# Draw rounded edge of command card
d: ImageDraw = ImageDraw.Draw(bg)
ccx, ccy = ww - self.__ccwidth, self.__ccmargin
rc = [ccx, ccy + CC_BORDER, ccx +
CC_BORDER, wh - self.__ccmargin - CC_BORDER - 1]
d.rectangle(rc, cfg['color-background'])
d.pieslice([ccx, rc[1] - CC_BORDER, ccx + CC_BORDER * 2,
rc[1] + CC_BORDER], 180, 270, cfg['color-background'])
d.pieslice([ccx, rc[3] - CC_BORDER, ccx + CC_BORDER * 2,
rc[3] + CC_BORDER], 90, 180, cfg['color-background'])
# Return image data
return encode(bg.crop((0, 0, rc[2], wh))), encode(bg.crop(
(rc[2], 0, ww, self.__ccmargin))), encode(bg.crop((rc[2], wh - self.__ccmargin, ww, wh)))
def __set_frame(self):
'''
Presents the currently active frame. Only call from main thread.
'''
immain, imtopr, imbotr = self.__sgimgs
data = self.__frames[self.__frame_idx][0]
szcnv = (CANVAS_SZ[0] + CC_CNV_GAP + CC_BORDER, CANVAS_SZ[1])
szmrg = (self.__ccwidth - CC_BORDER, self.__ccmargin)
immain(data=data[0], size=szcnv)
imtopr(data=data[1], size=szmrg)
imbotr(data=data[2], size=szmrg)
def __build_frames(self, path, imid):
'''
Produces a set of background image frames representing given image.
'''
frames = []
with Image.open(path, 'r') as im:
with suppress(EOFError):
while True:
frames.append([self.__im_prepare(
im), im.info['duration'] * 1_000_000 if 'duration' in im.info else 0])
im.seek(len(frames))
if P_PAUSE_GIF(path):
frames[-1][1] += PAUSE_GIF_DUR
self.__imq.put((imid, frames))
def set_image(self, set_: Set, img_idx: int):
'''
Submit a new image for presentation. Will be prepared on another thread.
'''
if self.__set != set_:
self.__set, self.__img_idx = set_, img_idx
elif self.__img_idx > img_idx:
self.__img_idx = img_idx
elif self.__img_idx < img_idx:
self.__img_idx = img_idx
else:
return # Same
self.__nimgs += 1
Thread(target=Canvas.__build_frames, args=(
self, set_[self.__img_idx], -self.__nimgs), daemon=True).start()
def update(self):
'''
Updates the presented image, if necessary. Call frequently from program main loop.
Returns, in milliseconds, the time to wait for next update() call.
'''
with suppress(Empty):
imid, frames = self.__imq.get(not self.__frames)
if self.__imid > imid: # Only consider newer images
self.__imid, self.__frames = imid, frames
self.__time = time_ns()
self.__frame_idx = 0
time_ = time_ns()
frame_time = time_ - self.__time
if self.__frames[self.__frame_idx][1] <= frame_time:
nframes = len(self.__frames)
self.__frame_idx = (self.__frame_idx + 1) % nframes
if nframes == 1:
self.__frames[self.__frame_idx][1] = maxsize
self.__time = time_
self.__set_frame()
frame_time = time_ns() - self.__time
# return self.__frames[self.__frame_idx][1] / 1_000_000
return max(
0, (self.__frames[self.__frame_idx][1] - frame_time) / 1_000_000)
|
StarcoderdataPython
|
376111
|
from grin import get_grin_arg_parser
from genzshcomp import CompletionGenerator
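# Added note: this script builds a zsh completion definition for the 'grin' command by
# letting genzshcomp inspect grin's argparse parser; CompletionGenerator.get() returns the
# completion script as a string, which is printed below.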
if __name__ == '__main__':
generator = CompletionGenerator("grin", get_grin_arg_parser())
print(generator.get())
|
StarcoderdataPython
|
4951274
|
import numpy as np
import theano
import theano.tensor as T
from collections import OrderedDict
a = theano.shared(np.zeros((1, 1), dtype = theano.config.floatX))
b = theano.shared(np.zeros((1, 1), dtype = theano.config.floatX))
c = T.scalar()
updates = OrderedDict()
updates[a] = a + 1
updates[b] = b + a
f = theano.function([c], c, updates = updates)
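# Added note: Theano evaluates all update expressions with the pre-call values of the shared
# variables and applies them together, so b picks up the *old* value of a on each call.
# The three calls below therefore print a = 1, 2, 3 and b = 0, 1, 3 (as (1, 1) arrays),
# while f itself just echoes its input c.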
print(f(1))
print(a.get_value())
print(b.get_value())
print(f(2))
print(a.get_value())
print(b.get_value())
print(f(3))
print(a.get_value())
print(b.get_value())
|
StarcoderdataPython
|
305504
|
<reponame>danmohad/PMC-thermodynamics
# -*- coding: utf-8 -*-
"""
Porous Media Combustor (PMC) Class
Copyright 2020, <NAME>, All rights reserved.
Refer to <NAME>, <NAME>, <NAME>, "Thermodynamic cycle analysis of superadiabatic matrix-stabilized combustion for gas turbine engines," Energy (207) 2020.
"""
import numpy as np
from sympy import Symbol, nsolve
# 'ct' is used below for the Stefan-Boltzmann constant but was not imported in the original
# snippet; it is presumably Cantera, which exposes cantera.stefan_boltzmann.
import cantera as ct
# Class for solving steady-state PMC problem with dimensional parameters
class PMC:
def __init__(self,PMCData):
self.lambda_eff = PMCData.lambda_eff
self.voidFrac = PMCData.voidFrac
self.massFlux = PMCData.massFlux
self.L = PMCData.L
self.dp_upstr = PMCData.dp_upstr
self.dp_dwnstr = PMCData.dp_dwnstr
self.emissivity = PMCData.emissivity
self.DeltaT_PH = PMCData.DeltaT_PH
self.DeltaT_RC = PMCData.DeltaT_RC
# Solve the PMC problem
def run(self,cp,T0,T3a,T4,f,f_st,LHV):
th_3a = T3a/T0
th_4 = T4/T0
K_R_1 = 3*(1.-self.voidFrac)/(self.dp_upstr*1000.) #from Hsu & Powell,
K_R_2 = 3*(1.-self.voidFrac)/(self.dp_dwnstr*1000.) # pore diameter in mm and K_R
K_R = np.average([K_R_1,K_R_2]) # in m^-1
iota = self.lambda_eff/(self.massFlux*cp*self.L)
delta = (16.*ct.stefan_boltzmann*T0**3)/(3.*K_R*self.lambda_eff)
mu = (3.*self.emissivity*K_R*(1.-self.voidFrac)*self.L)/16.
print('iota =',iota,'delta =',delta,'mu =',mu)
Delta1 = self.DeltaT_PH/T0
Delta2 = self.DeltaT_RC/T0
if f < f_st:
LHV = LHV #lean
else:
LHV = LHV/(f/f_st) #rich
th_3x = Symbol('THETA_3x')
th_3y = Symbol('THETA_3y')
th_3b = Symbol('THETA_3b')
th_S1 = Symbol('THETA_S1')
th_S2 = Symbol('THETA_S2')
f1 = iota*((1.+delta*((th_S1+th_S2)/2.)**3)*(th_S2-th_S1) -
delta*mu*(th_S1**4-th_3a**4)) - (th_3x-th_3a)
f2 = f/(f+1.)*(LHV/(cp*T0)) - (th_3y - th_3x) #local eq ratio (DON'T use beta here!!)
f3 = iota*((1.+delta*((th_S1+th_S2)/2.)**3)*(th_S2-th_S1) +
delta*mu*(th_S2**4-th_4**4)) - (th_3y-th_3b)
f4 = Delta1 - (th_S1 - th_3x)
f5 = Delta2 - (th_3b - th_S2)
SOL = nsolve((f1,f2,f3,f4,f5),(th_3x,th_3y,th_3b,th_S1,th_S2),\
((T3a+1000)/T3a,(T3a+1500)/T3a,\
(T3a+1000)/T3a,(T3a+1000)/T3a,\
(T3a+1000)/T3a),tol=1e-6)
SOL *= T0 # return to dimensional units
self.T3x,self.T3y,self.T3b = SOL[0],SOL[1],SOL[2] #gas temperatures
self.TS1,self.TS2 = SOL[3],SOL[4] #solid temperatures
return
# Class for solving steady-state PMC problem with non-dimensional parameters
class PMCNondim(PMC):
def __init__(self,PMCData):
self.iota = PMCData.iota
self.delta = PMCData.delta
self.mu = PMCData.mu
self.DeltaT_PH = PMCData.DeltaT_PH
self.DeltaT_RC = PMCData.DeltaT_RC
# Solve the PMC problem
def run(self,cp,T0,T3a,T4,f,f_st,LHV):
th_3a = T3a/T0
th_4 = T4/T0
Delta1 = self.DeltaT_PH/T0
Delta2 = self.DeltaT_RC/T0
if f < f_st:
LHV = LHV #lean
else:
LHV = LHV/(f/f_st) #rich
th_3x = Symbol('THETA_3x')
th_3y = Symbol('THETA_3y')
th_3b = Symbol('THETA_3b')
th_S1 = Symbol('THETA_S1')
th_S2 = Symbol('THETA_S2')
iota = self.iota
delta = self.delta
mu = self.mu
f1 = iota*((1.+delta*((th_S1+th_S2)/2.)**3)*(th_S2-th_S1) -
delta*mu*(th_S1**4-th_3a**4)) - (th_3x-th_3a)
f2 = f/(f+1.)*(LHV/(cp*T0)) - (th_3y - th_3x) #local eq ratio (DON'T use beta here!!)
f3 = iota*((1.+delta*((th_S1+th_S2)/2.)**3)*(th_S2-th_S1) +
delta*mu*(th_S2**4-th_4**4)) - (th_3y-th_3b)
f4 = Delta1 - (th_S1 - th_3x)
f5 = Delta2 - (th_3b - th_S2)
SOL = nsolve((f1,f2,f3,f4,f5),(th_3x,th_3y,th_3b,th_S1,th_S2),\
((T3a+1000)/T3a,(T3a+1500)/T3a,\
(T3a+1000)/T3a,(T3a+1000)/T3a,\
(T3a+1000)/T3a),tol=1e-6)
SOL *= T0 # return to dimensional units
self.T3x,self.T3y,self.T3b = SOL[0],SOL[1],SOL[2] #gas temperatures
self.TS1,self.TS2 = SOL[3],SOL[4] #solid temperatures
return
|
StarcoderdataPython
|
3221875
|
from rest_framework import viewsets
from score.models import Album, Score
from score.serializers import AlbumSerializer, ScoreSerializer
class AlbumViewset(viewsets.ModelViewSet):
queryset = Album.objects.all()
serializer_class = AlbumSerializer
class ScoreViewset(viewsets.ModelViewSet):
queryset = Score.objects.all()
serializer_class = ScoreSerializer
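# Typical wiring (illustrative sketch, not part of this file): the viewsets above are
# normally registered with a DRF router in urls.py, e.g.
# from rest_framework import routers
# router = routers.DefaultRouter()
# router.register(r'albums', AlbumViewset)
# router.register(r'scores', ScoreViewset)
# urlpatterns = router.urls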
|
StarcoderdataPython
|
5131710
|
<reponame>japonophile/darwin
from ikpy.chain import Chain
from ikpy.URDF_utils import get_chain_from_joints
from urdfpy import URDF
import math
import sys
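# Added note: the Darwin-OP servos appear to treat 150 degrees as the neutral position
# (the elbow limits noted below run 0-150 and 150-300), so ang() converts a signed offset
# from that centre into an absolute angle in radians.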
def ang(offset_deg):
abs_deg = 150 + offset_deg
abs_rad = math.pi * abs_deg / 180
return abs_rad
DARWIN_URDF = 'darwin.urdf'
robot = URDF.load(DARWIN_URDF)
joint_names = [j.name for j in robot.joints if j.joint_type != 'fixed'] # collect joint names; the original kept Joint objects, but the cfg keys assigned below are name strings
default_angle = math.pi * 150 / 180
cfg = dict(zip(joint_names, [default_angle] * len(joint_names)))
cfg['neck_joint'] = ang(-60)
cfg['l_shoulder_joint'] = ang(-60)
cfg['l_biceps_joint'] = ang(60)
cfg['l_elbow_joint'] = ang(-120) # limit: 0-150
cfg['r_shoulder_joint'] = ang(-60)
cfg['r_biceps_joint'] = ang(-60)
cfg['r_elbow_joint'] = ang(120) # limit:150-300
cfg['l_hip_joint'] = ang(30)
cfg['l_thigh_joint'] = ang(-60)
cfg['l_knee_joint'] = ang(30)
cfg['l_ankle_joint'] = ang(30)
cfg['l_foot_joint'] = ang(-30)
cfg['r_hip_joint'] = ang(-30)
cfg['r_thigh_joint'] = ang(-60)
cfg['r_knee_joint'] = ang(30)
cfg['r_ankle_joint'] = ang(30)
cfg['r_foot_joint'] = ang(30)
# robot.show(cfg=cfg)
# robot.animate()
def print_link(l, indent=''):
sys.stdout.write(l.name + '\n')
for j in robot.joints:
if j.parent == l.name:
joint_type = '(R)' if j.joint_type == 'revolute' else '(F)'
sys.stdout.write('{}{} {} -> '.format(indent, j.name, joint_type))
print_link(robot.link_map[j.child], indent + ' ')
print_link(robot.base_link)
# motors = ['l_shoulder_anchor_joint', 'l_shoulder_servo_joint',
# 'l_shoulder_joint', 'l_biceps_anchor_joint',
# 'l_biceps_axis_joint', 'l_biceps_joint', 'l_biceps_spo_joint']
# chain_elements = get_chain_from_joints(DARWIN_URDF, motors)
# -> this does not work due to StopIteration! How does this code even exist???
def get_chain(name_templates, side):
assert side in ['l', 'r']
element_names = ['body_link'] + list(map(lambda s: s.format(side), name_templates))
return Chain.from_urdf_file(DARWIN_URDF, base_elements=element_names)
arm_element_names = ['{}_shoulder_joint', '{}_shoulder_link',
'{}_biceps_joint', '{}_biceps_link',
'{}_elbow_joint', '{}_hand_link']
leg_element_names = ['{}_hip_joint', '{}_hip_link',
'{}_thigh_joint', '{}_thigh_link',
'{}_knee_joint', '{}_lowerleg_link',
'{}_ankle_joint', '{}_ankle_link',
'{}_foot_joint', '{}_foot_link']
left_arm = get_chain(arm_element_names, 'l')
right_arm = get_chain(arm_element_names, 'r')
left_leg = get_chain(leg_element_names, 'l')
right_leg = get_chain(leg_element_names, 'r')
# chain_elements = ['body_link', 'l_shoulder_joint', 'l_shoulder_link',
# 'l_biceps_joint', 'l_biceps_link', 'l_elbow_joint',
# 'l_hand_link']
# chain = Chain.from_urdf_file(DARWIN_URDF, base_elements=chain_elements)
# print(chain)
print('left_arm', left_arm)
print('right_arm', right_arm)
print('left_leg', left_leg)
print('right_leg', right_leg)
|
StarcoderdataPython
|
6425652
|
<gh_stars>100-1000
import unittest
from uvm.reg.uvm_reg_predictor import UVMRegPredictor, UVMPredictS
from uvm.uvm_unit import (create_reg, create_reg_block, TestPacket,
TestRegAdapter)
class TestUVMRegPredictor(unittest.TestCase):
def test_create_predictor(self):
predict = UVMRegPredictor("predictor_123", None)
self.assertEqual(predict.get_name(), "predictor_123")
def test_predict(self):
predict = UVMRegPredictor("predictor_345", None)
predict.adapter = TestRegAdapter()
rg = create_reg_block('my_block')
rg.lock_model()
predict.map = rg.default_map
predict.bus_in.write(TestPacket(123, 0x0))
def test_check_phase(self):
predict = UVMRegPredictor("predictor_567", None)
predict.check_phase(phase=None)
predict.m_pending[create_reg('test_reg')] = UVMPredictS()
predict.check_phase(phase=None)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
3520958
|
<reponame>timgianitsos/ancient_greek_genre_classification
import greek_features #seemingly unused here, but this makes the environment recognize features
import extract_features
from corpus_categories import composite_files, verse_misc_files, prose_files
import os
import sys
if __name__ == '__main__':
#Download corpus if non-existent
corpus_dir = os.path.join('tesserae', 'texts', 'grc')
tesserae_clone_command = 'git clone https://github.com/timgianitsos/tesserae.git'
RED, RESET = '\033[91m', '\033[0m' # ANSI color codes (assumed; RED/RESET are not defined elsewhere in this snippet)
if not os.path.isdir(corpus_dir):
print(RED + 'Corpus at ' + corpus_dir + ' does not exist - attempting to clone repository...' + RESET)
if os.system(tesserae_clone_command) != 0:
raise Exception('Unable to obtain corpus for feature extraction')
#Feature extractions
extract_features.main(
corpus_dir,
'tess',
#Exclude the following directories and files
excluded_paths=composite_files | verse_misc_files | prose_files,
#Output the results to a file to be processed by machine learning algorithms
output_file=None if len(sys.argv) <= 1 else sys.argv[1]
)
|
StarcoderdataPython
|
8013554
|
<filename>deploy/raw_test.py
from flask import Flask,abort,request,jsonify
app = Flask(__name__)
@app.route('/',methods=['GET','POST'])
def home():
return '<h>RedHouse Project</h>'
@app.route('/add_tast/',methods=['POST'])
def add_task():
print('###############',123123123123)
print(request.json)
return jsonify({'result':'success'})
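# Example request (illustrative, added): POST JSON to the endpoint above, assuming the
# server is running locally on port 5000 as configured below.
# import requests
# requests.post('http://localhost:5000/add_tast/', json={'task': 'demo'})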
if __name__ == "__main__":
app.run("0.0.0.0",5000,debug=False)
|
StarcoderdataPython
|
9695948
|
<reponame>sarang-apps/darshan_browser
#!/usr/bin/env vpython
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import shutil
import tempfile
import unittest
from core import path_util
path_util.AddTelemetryToPath()
from telemetry import decorators
import mock
import process_perf_results as ppr_module
UUID_SIZE = 36
class _FakeLogdogStream(object):
def write(self, data):
del data # unused
def close(self):
pass
def get_viewer_url(self):
return 'http://foobar.not.exit'
# pylint: disable=protected-access
class DataFormatParsingUnitTest(unittest.TestCase):
def tearDown(self):
ppr_module._data_format_cache = {}
def testGtest(self):
with mock.patch('__builtin__.open', mock.mock_open(read_data='{}')):
self.assertTrue(ppr_module._is_gtest('test.json'))
self.assertFalse(ppr_module._is_histogram('test.json'))
self.assertTrue(ppr_module._is_gtest('test.json'))
self.assertFalse(ppr_module._is_histogram('test.json'))
def testChartJSON(self):
with mock.patch('__builtin__.open',
mock.mock_open(read_data='{"charts": 1}')):
self.assertFalse(ppr_module._is_gtest('test.json'))
self.assertFalse(ppr_module._is_histogram('test.json'))
self.assertFalse(ppr_module._is_gtest('test.json'))
self.assertFalse(ppr_module._is_histogram('test.json'))
def testHistogram(self):
with mock.patch('__builtin__.open', mock.mock_open(read_data='[]')):
self.assertTrue(ppr_module._is_histogram('test.json'))
self.assertFalse(ppr_module._is_gtest('test.json'))
self.assertTrue(ppr_module._is_histogram('test.json'))
self.assertFalse(ppr_module._is_gtest('test.json'))
class ProcessPerfResultsIntegrationTest(unittest.TestCase):
def setUp(self):
self.test_dir = tempfile.mkdtemp()
self.output_json = os.path.join(self.test_dir, 'output.json')
self.task_output_dir = os.path.join(
os.path.dirname(__file__), 'testdata', 'task_output_dir')
m1 = mock.patch(
'process_perf_results.logdog_helper.text',
return_value = 'http://foo.link')
m1.start()
self.addCleanup(m1.stop)
m2 = mock.patch(
'process_perf_results.logdog_helper.open_text',
return_value=_FakeLogdogStream())
m2.start()
self.addCleanup(m2.stop)
m3 = mock.patch('core.results_dashboard.SendResults')
m3.start()
self.addCleanup(m3.stop)
def tearDown(self):
shutil.rmtree(self.test_dir)
@decorators.Disabled('chromeos') # crbug.com/865800
@decorators.Disabled('win') # crbug.com/860677, mock doesn't integrate well
# with multiprocessing on Windows.
@decorators.Disabled('all') # crbug.com/967125
def testIntegration(self):
build_properties = json.dumps({
'perf_dashboard_machine_group': 'test-builder',
'buildername': 'test-builder',
'buildnumber': 777,
'got_v8_revision': 'beef1234',
'got_revision_cp': 'refs/heads/master@{#1234}',
'got_webrtc_revision': 'fee123',
'git_revision': 'deadbeef',
'buildbucket': r"""{"build":
{"bucket": "master.tryserver.chromium.perf",
"created_by": "user:foo",
"created_ts": "1535490272757820",
"id": "8936915467712010816",
"project": "chrome",
"lease_key": "461228535",
"tags": ["builder:android-go-perf", "buildset:patch/1194825/3",
"cq_experimental:False",
"master:master.tryserver.chromium.perf",
"user_agent:cq"]}}"""
})
output_results_dir = os.path.join(self.test_dir, 'outputresults')
os.mkdir(output_results_dir)
return_code, benchmark_upload_result_map = ppr_module.process_perf_results(
self.output_json, configuration_name='test-builder',
build_properties=build_properties,
task_output_dir=self.task_output_dir,
smoke_test_mode=False,
output_results_dir=output_results_dir)
# Output filenames are prefixed with a UUID. Strip it off.
output_results = {
filename[UUID_SIZE:]: os.stat(os.path.join(
output_results_dir, filename)).st_size
for filename in os.listdir(output_results_dir)}
self.assertEquals(32, len(output_results))
self.assertLess(10 << 10, output_results["power.desktop.reference"])
self.assertLess(10 << 10, output_results["blink_perf.image_decoder"])
self.assertLess(10 << 10, output_results["octane.reference"])
self.assertLess(10 << 10, output_results["power.desktop"])
self.assertLess(10 << 10, output_results["speedometer-future"])
self.assertLess(10 << 10, output_results["blink_perf.owp_storage"])
self.assertLess(10 << 10, output_results["memory.desktop"])
self.assertLess(10 << 10, output_results["wasm"])
self.assertLess(10 << 10, output_results[
"dummy_benchmark.histogram_benchmark_1"])
self.assertLess(10 << 10, output_results[
"dummy_benchmark.histogram_benchmark_1.reference"])
self.assertLess(10 << 10, output_results["wasm.reference"])
self.assertLess(10 << 10, output_results["speedometer"])
self.assertLess(10 << 10, output_results[
"memory.long_running_idle_gmail_tbmv2"])
self.assertLess(10 << 10, output_results["v8.runtime_stats.top_25"])
self.assertLess(1 << 10, output_results[
"dummy_benchmark.noisy_benchmark_1"])
self.assertLess(10 << 10, output_results["blink_perf.svg"])
self.assertLess(10 << 10, output_results[
"v8.runtime_stats.top_25.reference"])
self.assertLess(10 << 10, output_results["jetstream.reference"])
self.assertLess(10 << 10, output_results["jetstream"])
self.assertLess(10 << 10, output_results["speedometer2-future.reference"])
self.assertLess(10 << 10, output_results["blink_perf.svg.reference"])
self.assertLess(10 << 10, output_results[
"blink_perf.image_decoder.reference"])
self.assertLess(10 << 10, output_results["power.idle_platform.reference"])
self.assertLess(10 << 10, output_results["power.idle_platform"])
self.assertLess(1 << 10, output_results[
"dummy_benchmark.noisy_benchmark_1.reference"])
self.assertLess(10 << 10, output_results["speedometer-future.reference"])
self.assertLess(10 << 10, output_results[
"memory.long_running_idle_gmail_tbmv2.reference"])
self.assertLess(10 << 10, output_results["memory.desktop.reference"])
self.assertLess(10 << 10, output_results[
"blink_perf.owp_storage.reference"])
self.assertLess(10 << 10, output_results["octane"])
self.assertLess(10 << 10, output_results["speedometer.reference"])
self.assertEquals(return_code, 1)
self.assertEquals(benchmark_upload_result_map,
{
"power.desktop.reference": True,
"blink_perf.image_decoder": True,
"octane.reference": True,
"power.desktop": True,
"speedometer-future": True,
"blink_perf.owp_storage": True,
"memory.desktop": True,
"wasm": True,
"dummy_benchmark.histogram_benchmark_1": True,
"dummy_benchmark.histogram_benchmark_1.reference": True,
"wasm.reference": True,
"speedometer": True,
"memory.long_running_idle_gmail_tbmv2": True,
"v8.runtime_stats.top_25": True,
"dummy_benchmark.noisy_benchmark_1": True,
"blink_perf.svg": True,
"v8.runtime_stats.top_25.reference": True,
"jetstream.reference": True,
"jetstream": True,
"speedometer2-future.reference": True,
"speedometer2-future": False, # Only this fails due to malformed data
"blink_perf.svg.reference": True,
"blink_perf.image_decoder.reference": True,
"power.idle_platform.reference": True,
"power.idle_platform": True,
"dummy_benchmark.noisy_benchmark_1.reference": True,
"speedometer-future.reference": True,
"memory.long_running_idle_gmail_tbmv2.reference": True,
"memory.desktop.reference": True,
"blink_perf.owp_storage.reference": True,
"octane": True,
"speedometer.reference": True
})
class ProcessPerfResults_HardenedUnittest(unittest.TestCase):
def setUp(self):
self._logdog_text = mock.patch(
'process_perf_results.logdog_helper.text',
return_value = 'http://foo.link')
self._logdog_text.start()
self.addCleanup(self._logdog_text.stop)
self._logdog_open_text = mock.patch(
'process_perf_results.logdog_helper.open_text',
return_value=_FakeLogdogStream())
self._logdog_open_text.start()
self.addCleanup(self._logdog_open_text.stop)
@decorators.Disabled('chromeos') # crbug.com/956178
def test_handle_perf_json_test_results_IOError(self):
directory_map = {
'benchmark.example': ['directory_that_does_not_exist']}
test_results_list = []
ppr_module._handle_perf_json_test_results(directory_map, test_results_list)
self.assertEqual(test_results_list, [])
@decorators.Disabled('chromeos') # crbug.com/956178
def test_last_shard_has_no_tests(self):
benchmark_name = 'benchmark.example'
temp_parent_dir = tempfile.mkdtemp(suffix='test_results_outdir')
try:
shard1_dir = os.path.join(temp_parent_dir, 'shard1')
os.mkdir(shard1_dir)
shard2_dir = os.path.join(temp_parent_dir, 'shard2')
os.mkdir(shard2_dir)
with open(os.path.join(shard1_dir, 'test_results.json'), 'w') as fh:
fh.write(
'{"version": 3, "tests":{"v8.browsing_desktop-future": "blah"}}')
with open(os.path.join(shard2_dir, 'test_results.json'), 'w') as fh:
fh.write('{"version": 3,"tests":{}}')
directory_map = {
benchmark_name: [shard1_dir, shard2_dir]}
benchmark_enabled_map = ppr_module._handle_perf_json_test_results(
directory_map, [])
self.assertTrue(benchmark_enabled_map[benchmark_name],
'Regression test for crbug.com/984565')
finally:
shutil.rmtree(temp_parent_dir)
@decorators.Disabled('chromeos') # crbug.com/956178
def test_merge_perf_results_IOError(self):
results_filename = None
directories = ['directory_that_does_not_exist']
ppr_module._merge_perf_results('benchmark.example', results_filename,
directories)
@decorators.Disabled('chromeos') # crbug.com/956178
def test_handle_perf_logs_no_log(self):
tempdir = tempfile.mkdtemp()
try:
dir1 = os.path.join(tempdir, '1')
dir2 = os.path.join(tempdir, '2')
os.makedirs(dir1)
os.makedirs(dir2)
with open(os.path.join(dir1, 'benchmark_log.txt'), 'w') as logfile:
logfile.write('hello world')
directory_map = {
'benchmark.with.log': [dir1],
'benchmark.with.no.log': [dir2],
}
extra_links = {}
ppr_module._handle_perf_logs(directory_map, extra_links)
finally:
shutil.rmtree(tempdir)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
6483659
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 by <NAME>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import gettext
import os
class MsgTranslator(object):
translate = None
def __init__(self):
self.translate = self.dummy_translate
def dummy_translate(self, msg):
return msg
def set_locale(self, textdomain, msgs_path, locale='system'):
msgs_path = msgs_path.decode('utf8') \
if not isinstance(msgs_path, unicode) else msgs_path
if locale == 'en' or not os.path.exists(msgs_path):
return
if locale and not locale == 'system':
os.environ['LANGUAGE'] = locale
gettext.bindtextdomain(textdomain, msgs_path)
gettext.textdomain(textdomain)
self.translate = gettext.gettext
def __call__(self, msg):
return self.translate(msg)
|
StarcoderdataPython
|
5075902
|
<gh_stars>0
import unittest
import os
from load_configuration import *
import shutil
import copy
def test_generateConfig(self):
generateConfig()
parser = configparser.ConfigParser()
parser.read(config_address)
self.assertEqual(dict(parser['general']), defaults)
def test_updateValue(self, key, value):
updateValue(key, value)
self.assertEqual(getConfig()['general'][key], value)
class LoadConfigTests(unittest.TestCase):
def test_generateConfig_fresh_file(self):
"""Testing generateConfig when it does not have to overwrite a file"""
if os.path.isfile(config_address):
os.remove(config_address)
test_generateConfig(self)
def test_generateConfig_no_config_folder(self):
"""Tests that generateConfig works when there is no directory to store the config file in"""
if os.path.isdir(config_address + "/.."):
shutil.rmtree(config_address + "/..")
test_generateConfig(self)
def test_generateConfig_overwrite_file(self):
"""Tests that generateConfig works when there is already a config file"""
if os.path.isfile(config_address):
os.remove(config_address)
open(config_address, "w").close()
if not os.path.isfile(config_address):
raise Exception
test_generateConfig(self)
def test_getConfig_when_there_is_a_config_file(self):
"""Tests that getConfig works when its values are the defaults"""
generateConfig()
self.assertEqual(getConfig()['general'], defaults)
def test_getConfig_but_there_is_no_config_file(self):
"""Tests getConfig when there is no config file"""
if os.path.isfile(config_address):
os.remove(config_address)
self.assertEqual(getConfig()['general'], defaults)
def test_getConfig_with_missing_keys_in_config_file(self):
"""Tests that getConfig will rewrite the config file if the config file does not contain all the keys in acceptedValues.keys()"""
#Write the bad config file
if not os.path.isdir(config_address + "/.."):
os.mkdir(config_address + "/..")
parser = configparser.ConfigParser()
bad_defaults = copy.deepcopy(defaults)
bad_defaults.pop('size')
parser['general'] = bad_defaults
with open(config_address, 'w') as configfile:
parser.write(configfile)
#Read the bad config file
config = getConfig()['general']
self.assertEqual(config, defaults)
def test_getConfig_with_extra_keys_in_config_file(self):
"""Tests that getConfig will rewrite the config file if the config file contains keys that aren't in acceptedValues.keys()"""
#Write the bad config file
if not os.path.isdir(config_address + "/.."):
os.mkdir(config_address + "/..")
parser = configparser.ConfigParser()
bad_defaults = copy.deepcopy(defaults)
bad_defaults['soize'] = 'starndard'
parser['general'] = bad_defaults
with open(config_address, 'w') as configfile:
parser.write(configfile)
#Read the bad config file
config = getConfig()['general']
self.assertEqual(config, defaults)
def test_updateValues(self):
"""Test that updateValue works for every key with a valid value"""
generateConfig()
test_updateValue(self, 'size', 'large')
test_updateValue(self, 'datacenter', 'Light')
test_updateValue(self, 'universalisupdatefrequency', '5')
def test_updateValues_with_bad_values(self):
"""Test that updateValue raises ValueError when given bad values for any key"""
generateConfig()
with self.assertRaises(ValueError):
updateValue('size', 'Chaos')
with self.assertRaises(ValueError):
updateValue('datacenter', '5')
with self.assertRaises(ValueError):
updateValue('universalisupdatefrequency', "standard")
def test_updateValues_with_bad_key(self):
"""Tests that updateValue raises a KeyError when given a key that isn't in acceptedValues.keys()"""
generateConfig()
with self.assertRaises(KeyError):
updateValue('boogaloo', 'Chaos')
|
StarcoderdataPython
|
3379417
|
import math
blockStart=[0,0]
def addBlock(x,y):
global blockStart
blocks.append({"x1":blockStart[0], "y1":blockStart[1], "x2":x, "y2":y, "exitVelX":0, "exitVelY":0, "accX":0, "accY":0, "blkTime":0})
blockStart = [x,y]
def bisector(p1, p2):
x1, y1 = p1
x2, y2 = p2
if (y1 == y2):
# horizontal segment: the perpendicular bisector is vertical; approximate it with a very steep line through the midpoint
return 100000, (y1 + y2) / 2 - 100000 * (x1 + x2) / 2
a = (x2-x1)/(y1-y2)
b = (y1+y2)/2-a*(x1+x2)/2
return a, b
def intersect(f1, f2):
a1, b1 = f1
a2, b2 = f2
x = (b1-b2)/(a2-a1)
y = a1*x+b1
return x, y
def distance(p1, p2):
x1, y1 = p1
x2, y2 = p2
return ((x1-x2)**2+(y1-y2)**2)**0.5
def circle(p1, p2, p3):
x1, y1 = p1
x2, y2 = p2
x3, y3 = p3
if y1==y2:
p3, p2 = p2, p3
elif y2==y3:
p1, p2 = p2, p1
center = intersect(bisector(p1, p2), bisector(p2, p3))
radius = distance(center, p1)
cx, cy = center
return '(x-{x0:.{w}f})^2 + (y-{y0:.{w}f})^2 = {r:.{w}f}^2'.format(w=2, x0=cx, y0=cy, r=radius)
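# Sanity check for circle() above (illustrative, added): the circumcircle of (0,0), (2,0), (0,2)
# has centre (1, 1) and radius sqrt(2), so circle((0, 0), (2, 0), (0, 2)) returns roughly
# '(x-1.00)^2 + (y-1.00)^2 = 1.41^2'; the centre is found by intersecting two perpendicular bisectors.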
def vecMag(v):
return math.sqrt(v[0]*v[0]+v[1]*v[1])
def vec(p1,p2):
x1, y1 = p1
x2, y2 = p2
p1p2 = (x2-x1,y2-y1)
return p1p2
def vecUnit(p1,p2):
x1, y1 = p1
x2, y2 = p2
p1p2 = (x2-x1,y2-y1)
p1p2Mag = math.sqrt(p1p2[0]*p1p2[0]+p1p2[1]*p1p2[1])
p1p2Unit = (p1p2[0]/p1p2Mag, p1p2[1]/p1p2Mag)
return p1p2Unit
def vecMult(v, s):
vRes = (v[0]*s, v[1]*s)
return vRes
def vecAdd(v1, v2):
return (v1[0]+v2[0],v1[1]+v2[1])
def lineSeg(p1,p2):
return (p1[0],p1[1],p2[0],p2[1])
def lineSegMidPt(ls):
return ((ls[0]+ls[2])/2,(ls[1]+ls[3])/2)
def circle2(p1, p2, p3):
x1, y1 = p1
x2, y2 = p2
x3, y3 = p3
# Vector p1p2
p1p2 = vec(p1,p2)
# Unit Vector p2p3
p2p3Unit = vecUnit(p2,p3)
# Make Vector p2p3 same magnitude as p1p2
p2p3T = vecMult(p2p3Unit, vecMag(p1p2))
p3T = vecAdd(p2, p2p3T)
print("p1p2", p1p2, "p2p3Unit", p2p3Unit, "p2p3T", p2p3T)
#
# # Find midpoint of line from p1 to p3T
# lineSegP1P3T = lineSeg(p1,p3T)
# ptD = lineSegMidPt(lineSegP1P3T)
# Length of line segment p1 to p3T
p1p3T = vec(p1, p3T)
lenP1P3T = vecMag(p1p3T)
# Length of line segment p1 to p2
lenP1P2 = vecMag(p1p2)
print("lenP1P2", lenP1P2, "lenP1P3T", lenP1P3T)
# Circle radius
circleRadius = lenP1P2 * (0.5 * lenP1P3T) / (math.sqrt(lenP1P2*lenP1P2 - 0.25*lenP1P3T*lenP1P3T))
# Perpendicular to p1p2Unit
p1p2Unit = vecUnit(p1,p2)
p1p2UnitPerp = (-p1p2Unit[1], p1p2Unit[0])
# Centre of circle
circleCentre = vecMult(p1p2UnitPerp, circleRadius)
print("circleRadius", circleRadius, "circleCentre", circleCentre)
return ""
blocks = []
addBlock(10,0)
addBlock(10,20)
# addBlock(0,10)
# addBlock(0,0)
print(blocks)
for loopIdx in range(len(blocks)-1):
# Current block
blkIdx = len(blocks)-1-loopIdx
curBlock = blocks[blkIdx]
# Prev block
prevBlock = blocks[blkIdx-1]
# X and Y distances in the block
distX = curBlock["x2"] - curBlock['x1']
distY = curBlock["y2"] - curBlock['y1']
print("MoveFrom", curBlock['x1'], curBlock['y1'], "To", curBlock["x2"], curBlock["y2"], "DistX", distX, "DistY", distY)
# Compute circle containing block start and end points
print(prevBlock['x1'])
print(circle2((prevBlock['x1'], prevBlock['y1']), (curBlock['x1'], curBlock['y1']), (curBlock['x2'], curBlock['y2'])))
|
StarcoderdataPython
|
8142533
|
<reponame>carlosep93/LANGSPEC
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This ref code is licensed under the license found in the LICENSE file in
# the root directory of this ref tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import itertools
import numpy as np
import os
import torch
from fairseq import options
from fairseq.data import (
data_utils, Dictionary, NliDataset, ConcatDataset,
IndexedRawTextDataset, IndexedCachedDataset, IndexedDataset
)
from . import FairseqTask, register_task
@register_task('nli')
class NliTask(FairseqTask):
"""
Translate from one (ref) language to another (hyp) language.
Args:
ref_dict (Dictionary): dictionary for the ref language
hyp_dict (Dictionary): dictionary for the hyp language
.. note::
The translation task is compatible with :mod:`train.py <train>`,
:mod:`generate.py <generate>` and :mod:`interactive.py <interactive>`.
The translation task provides the following additional command-line
arguments:
.. argparse::
:ref: fairseq.tasks.translation_parser
:prog:
"""
@staticmethod
def add_args(parser):
"""Add task-specific arguments to the parser."""
parser.add_argument('data', nargs='+', help='path(s) to data directorie(s)')
parser.add_argument('--ref-lang', default=None, metavar='REF',
help='reference language')
parser.add_argument('--hyp-lang', default=None, metavar='HYP',
help='hypothesis language')
parser.add_argument('--raw-text', action='store_true',
help='load raw text dataset')
parser.add_argument('--labels', action='store_true',
help='path to label file')
parser.add_argument('--left-pad-ref', default='True', type=str, metavar='BOOL',
help='pad the reference on the left')
parser.add_argument('--left-pad-hyp', default='False', type=str, metavar='BOOL',
help='pad the hypothesis on the left')
parser.add_argument('--max-ref-positions', default=1024, type=int, metavar='N',
help='max number of tokens in the reference sequence')
parser.add_argument('--max-hyp-positions', default=1024, type=int, metavar='N',
help='max number of tokens in the hypothesis sequence')
parser.add_argument('--upsample-primary', default=1, type=int,
help='amount to upsample primary dataset')
parser.add_argument('--ref-enc-path', type=str,
help='path to pretrained reference encoder file')
parser.add_argument('--ref-enc-key', type=str,
help='key (if any) of pretrained reference encoder')
parser.add_argument('--hyp-enc-path', type=str,
help='path to pretrained hypothesis encoder file')
parser.add_argument('--hyp-enc-key', type=str,
help='key (if any) of pretrained hypothesis encoder')
def __init__(self, args, ref_dict, hyp_dict):
super().__init__(args)
self.ref_dict = ref_dict
self.hyp_dict = hyp_dict
self.ref_enc_path= args.ref_enc_path
self.ref_enc_key = args.ref_enc_key
self.hyp_enc_path= args.hyp_enc_path
self.hyp_enc_key = args.hyp_enc_key
@classmethod
def setup_task(cls, args, **kwargs):
"""Setup the task (e.g., load dictionaries).
Args:
args (argparse.Namespace): parsed command-line arguments
"""
args.left_pad_ref = options.eval_bool(args.left_pad_ref)
args.left_pad_hyp = options.eval_bool(args.left_pad_hyp)
# find language pair automatically
if args.ref_lang is None or args.hyp_lang is None:
args.ref_lang, args.hyp_lang = data_utils.infer_language_pair(args.data[0])
if args.ref_lang is None or args.hyp_lang is None:
raise Exception('Could not infer language pair, please provide it explicitly')
# load dictionaries
ref_dict = Dictionary.load(os.path.join(args.data[0], 'dict.{}.txt'.format(args.ref_lang)))
hyp_dict = Dictionary.load(os.path.join(args.data[0], 'dict.{}.txt'.format(args.hyp_lang)))
assert ref_dict.pad() == hyp_dict.pad()
assert ref_dict.eos() == hyp_dict.eos()
assert ref_dict.unk() == hyp_dict.unk()
print('| [{}] dictionary: {} types'.format(args.ref_lang, len(ref_dict)))
print('| [{}] dictionary: {} types'.format(args.hyp_lang, len(hyp_dict)))
return cls(args, ref_dict, hyp_dict)
def load_dataset(self, split, combine=False, **kwargs):
"""Load a given dataset split.
Args:
split (str): name of the split (e.g., train, valid, test)
"""
def split_exists(split, ref, hyp, lang, data_path):
filename = os.path.join(data_path, '{}.{}-{}.{}'.format(split, ref, hyp, lang))
if self.args.raw_text and IndexedRawTextDataset.exists(filename):
return True
elif not self.args.raw_text and IndexedDataset.exists(filename):
return True
return False
def indexed_dataset(path, dictionary):
if self.args.raw_text:
return IndexedRawTextDataset(path, dictionary)
elif IndexedDataset.exists(path):
return IndexedCachedDataset(path, fix_lua_indexing=True)
return None
ref_datasets = []
hyp_datasets = []
data_paths = self.args.data
for dk, data_path in enumerate(data_paths):
# infer langcode
ref, hyp = self.args.ref_lang, self.args.hyp_lang
ref_datasets.append(indexed_dataset(data_path + '/' + '.'.join(['ref',split,ref]), self.ref_dict))
hyp_datasets.append(indexed_dataset(data_path + '/' + '.'.join(['hyp',split,hyp]), self.hyp_dict))
if not combine:
break
assert len(ref_datasets) == len(hyp_datasets)
if len(ref_datasets) == 1:
ref_dataset, hyp_dataset = ref_datasets[0], hyp_datasets[0]
else:
sample_ratios = [1] * len(ref_datasets)
sample_ratios[0] = self.args.upsample_primary
ref_dataset = ConcatDataset(ref_datasets, sample_ratios)
hyp_dataset = ConcatDataset(hyp_datasets, sample_ratios)
labels = '.'.join([data_paths[0] + '/' + 'lab', split, ref])
self.datasets[split] = NliDataset(
ref_dataset, ref_dataset.sizes, self.ref_dict,
hyp_dataset, hyp_dataset.sizes, self.hyp_dict,
label_path=labels,
max_reference_positions=self.args.max_ref_positions,
max_hypothesis_positions=self.args.max_hyp_positions,
)
def max_positions(self):
"""Return the max sentence length allowed by the task."""
return (self.args.max_ref_positions, self.args.max_hyp_positions)
def encoder_paths(self):
return self.ref_enc_path, self.hyp_enc_path
def encoder_keys(self):
return self.ref_enc_key, self.hyp_enc_key
@property
def ref_dictionary(self):
"""Return the ref :class:`~fairseq.data.Dictionary`."""
return self.ref_dict
@property
def hyp_dictionary(self):
"""Return the hyp :class:`~fairseq.data.Dictionary`."""
return self.hyp_dict
@property
def target_dictionary(self):
"""Return the hyp :class:`~fairseq.data.Dictionary`."""
return self.hyp_dict
def train_step(self, sample, model, criterion, optimizer, ignore_grad=False):
model.train()
agg_loss, agg_sample_size, agg_logging_output = 0., 0., {}
loss, sample_size, logging_output = criterion(model, sample)
if ignore_grad:
loss *= 0
optimizer.backward(loss)
agg_loss += loss.detach().item()
# TODO make summing of the sample sizes configurable
agg_sample_size += sample_size
agg_logging_output = logging_output
return agg_loss, agg_sample_size, agg_logging_output
def valid_step(self, sample, model, criterion):
model.eval()
with torch.no_grad():
agg_loss, agg_sample_size, agg_logging_output = 0., 0., {}
loss, sample_size, logging_output = criterion(model, sample)
agg_loss += loss.data.item()
# TODO make summing of the sample sizes configurable
agg_sample_size += sample_size
agg_logging_output = logging_output
return agg_loss, agg_sample_size, agg_logging_output
|
StarcoderdataPython
|
3564117
|
import numpy as np
import cv2
import matplotlib.pyplot as plt
import tensorflow as tf
from number import Number
from all_numbers_video import All_numbers
import linija as linija
# keras
from keras.models import Sequential
from keras.layers import Dense, Conv2D, Dropout, Flatten, MaxPooling2D
import test as testiranje
# image handling helpers
def load_image(path):
return cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2RGB)
def image_gray(image):
return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
def image_bin(image_gs):
#height, width = image_gs.shape[0:2]
#image_binary = np.ndarray((height, width), dtype=np.uint8)
ret,image_bin = cv2.threshold(image_gs, 127, 255, cv2.THRESH_BINARY)
return image_bin
def invert(image):
return 255-image
def display_image(image, color= False):
if color:
plt.imshow(image)
else:
plt.imshow(image, 'gray')
def dilate(image):
kernel = np.ones((3,3)) # 3x3 block structuring element
return cv2.dilate(image, kernel, iterations=1)
def erode(image):
kernel = np.ones((3,3), np.uint8) # 3x3 block structuring element
return cv2.erode(image, kernel, iterations=1)
def resize_region(region):
'''Resize the selected region to a 28x28 image'''
return cv2.resize(region,(28,28), interpolation = cv2.INTER_NEAREST)
def copy_number(img, number):
'''Copy a recognized digit onto a scaled 28x28 canvas so that it matches the digits the classifier was trained on'''
region = np.zeros((28, 28)).astype('float32')
granice = number.granice
x = granice[0] - 3
y = granice[1] - 3
w = granice[2] + 3
h = granice[3] + 3
modified_h = 28.0 - h
modified_w = 28.0 - w
y_off = int(modified_h / 2.0)
x_off = int(modified_w / 2.0)
for j in range(0, w):
for k in range(0, h):
flag = 0 <= y + k < img.shape[0] and 0 <= x + j < img.shape[1]
if flag:
region_x = y_off + k
region_y = x_off + j
img_x = y + k
img_y = x + j
region[region_x, region_y] = img[img_x, img_y] / 255.0
return region
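# Added note: copy_number() above normalises pixels to [0, 1] floats and centres the digit
# in a 28x28 canvas, mirroring the MNIST preprocessing used when training the classifiers
# (x_train /= 255 in train_knn / train_neural_network).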
def select_roi(image_orig, image_bin):
'''Mark the regions of interest (ROI) on the original image.
For each region, create a separate 28 x 28 image.
Regions are located with cv2.boundingRect(contour).
Return the original image with the regions outlined
and an array of region images sorted by increasing x coordinate.
'''
img, contours, hierarchy = cv2.findContours(image_bin.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
sorted_regions = [] # list of regions sorted along the x axis (left to right)
regions_array = []
granice = []
for contour in contours:
x, y, w, h = cv2.boundingRect(contour) # coordinates and size of the bounding rectangle
if h > 8:
trenutne = x, y, w, h
granice.append(Number((x, y, w, h), False))
region = image_bin[y:y + h + 1, x:x + w + 1]
regions_array.append([resize_region(region), (x, y, w, h)])
cv2.rectangle(image_orig, (x, y), (x + w, y + h), (0, 255, 0), 2)
regions_array = sorted(regions_array, key=lambda item: item[1][0])
sorted_regions = [region[0] for region in regions_array]
return image_orig, granice
def analize_video(video_path, klas, all_copies):
frame_num = 0
cap = cv2.VideoCapture(video_path)
ok, frame_1 = cap.read()
cap.set(1, frame_num) # frame indexing
all_frames = []
rezultat = 0
svi_brojevi = All_numbers()
moja_linija = linija.pronadji_liniju(frame_1)
# analyse the video frame by frame
while True:
frame_num += 1
ret_val, frame = cap.read()
# plt.imshow(frame)
# if the frame was not captured
if not ret_val:
break
all_frames.append(frame)
maska = linija.get_mask(frame, 127, 127, 127, 255, 255, 255)
bez_suma = cv2.bitwise_and(frame, frame, mask=maska)
bez_suma = image_bin(image_gray(bez_suma))
selected_frame,brojevi = select_roi(frame_1, bez_suma)
brojevi = svi_brojevi.update(brojevi)
for granica in brojevi:
flag = not granica.prosao_liniju and linija.prosao_broj(moja_linija, granica)
if flag:
copy = copy_number(bez_suma, granica)
all_copies.append(copy)
ulaz = copy.reshape(1, 784).astype('float32')
izlaz = int(klas.findNearest(ulaz, k=1)[0])
#ulaz = copy.reshape(1, 28, 28, 1).astype('float32')
#izlaz = klas.predict(ulaz)
rezultat += izlaz
granica.prosao_liniju = True
print('Izlaz', izlaz)
cap.release()
return all_frames, rezultat
def load_mnist():
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
return (x_train, y_train), (x_test, y_test)
def train_knn():
train, test = load_mnist()
x_train = train[0].reshape(60000, 784)
y_train = train[1].astype('float32')
x_train = x_train.astype('float32')
x_train /= 255
knn = cv2.ml.KNearest_create()
print("Train KNN")
knn.train(x_train, cv2.ml.ROW_SAMPLE, y_train)
print("Done...")
return knn
def train_neural_network():
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
# Reshaping the array to 4-dims so that it can work with the Keras API
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
input_shape = (28, 28, 1)
# Making sure that the values are float so that we can get decimal points after division
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
# Normalizing the RGB codes by dividing it to the max RGB value.
x_train /= 255
x_test /= 255
model = Sequential()
model.add(Conv2D(28, kernel_size=(3, 3), input_shape=input_shape))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten()) # Flattening the 2D arrays for fully connected layers
model.add(Dense(128, activation=tf.nn.relu))
model.add(Dropout(0.2))
model.add(Dense(10, activation=tf.nn.softmax))
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(x=x_train, y=y_train, epochs=3)
return model
#klasifikator = train_neural_network()
klasifikator = train_knn()
all_copies = []
with open('out.txt', 'w') as file:
file.write('RA 176/2015 Uros Jakovljevic\n')
file.write('Video\tsuma\t\n')
for i in [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]:
all_frames, rezultat = analize_video('data/videos/video-' + str(i) + '.avi', klasifikator, all_copies)
with open('out.txt', 'a') as file:
file.write('video-' + str(i) + '\t' + str(rezultat) + '\n')
print(rezultat)
testiranje.test()
|
StarcoderdataPython
|
3563042
|
<reponame>rheinonen/hw_ml
import numpy as np
import xgboost as xgb
import sklearn as skl
import time
from matplotlib import pyplot as plt
#from keras.models import Sequential
#from keras.layers import Dense
from keras.models import load_model
#from keras import regularizers
#from keras.callbacks import EarlyStopping, ModelCheckpoint
#import sklearn as skl
#models = [xgb.Booster({'nthread': 2}) for i in range(0,5)]
#for i in range(0,5):
# models[i].load_model(str(i)+'.model')
n_models=10
# diagonal component of density flux
# enslist=(0.1, 10, 20, 30, 40,50)
# plt.xlabel('grad n')
# plt.ylabel('n flux (at grad u=0)')
#
# for ens in enslist:
# average_flux=np.zeros((1601,))
# predict_data=np.transpose([np.ones(1601)*ens,np.ones(1601), [i/100. for i in range(-800,801)],np.zeros(1601),np.zeros(1601)])
# for i in range(0,n_models):
# model = load_model("n_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-800,801)],average_flux)
# time.sleep(1)
# print(str(ens))
# plt.legend(['ens='+str(ens) for ens in enslist])
# plt.show()
#diagonal component of density flux---dependence on vorticity
# vortlist=(-6,-4,-2,0, 2, 4, 6)
# plt.xlabel('grad n')
# plt.ylabel('n flux')
# for vort in vortlist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*10,np.ones(1001)*vort, [i/100. for i in range(-500,501)],np.zeros(1001),np.zeros(1001)])
# for i in range(0,5):
# model = load_model("n_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(str(vort))
# plt.legend(['vort='+str(vort) for vort in vortlist])
# plt.show()
#density flux---dependence on both gradients
gradlist=(-4,-3,-2,-1, 0,1,2,3,4)
plt.xlabel('grad vort')
plt.ylabel('n flux')
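# Added note: the block below averages the predicted density flux over the n_models saved
# Keras ensemble members at fixed enstrophy (10) and vorticity (1), sweeping grad vort on the
# x axis for several fixed values of grad n (one curve per legend entry).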
for grad in gradlist:
average_flux=np.zeros((501,))
predict_data=np.transpose([np.ones(501)*10,np.ones(501), np.ones(501)*grad,[i/50. for i in range(-250,251)],np.zeros(501)])
for i in range(0,n_models):
model = load_model("n_"+str(i)+".h5")
predicted_flux=model.predict(predict_data)
average_flux=average_flux+predicted_flux.ravel() # model.predict returns shape (501, 1); flatten so the running sum stays (501,)
average_flux=average_flux/n_models
plt.plot([i/50. for i in range(-250,251)],average_flux)
time.sleep(1)
print(grad)
plt.legend(['grad n='+str(grad) for grad in gradlist])
plt.show()
#off-diagonal component of density flux ---dependence on enstrophy
enslist=(0.1, 10, 20, 30, 40,50)
# plt.xlabel('grad vort')
# plt.ylabel('n flux (at grad n=0)')
# for ens in enslist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*ens,np.ones(1001),np.zeros(1001),[i/100. for i in range(-500,501)],np.zeros(1001)])
# for i in (3,):# range(0,n_models):
# model = load_model("n_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# #average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(ens)
# plt.legend(['ens='+str(ens) for ens in enslist])
# plt.show()
#off-diagonal component of density flux --- dependence on vorticity
# vortlist=(-6, -4, -2, 0, 2,4,6)
# plt.xlabel('grad vort')
# plt.ylabel('n flux (at grad n=0)')
# for vort in vortlist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*10,np.ones(1001)*vort,np.zeros(1001),[i/100. for i in range(-500,501)],np.zeros(1001)])
# for i in range(0,n_models):
# model = load_model("n_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(vort)
# plt.legend(['vort='+str(vort) for vort in vortlist])
# plt.show()
#
#diagonal component of vorticity flux
# enslist=(0.1, 10, 20, 30, 40,50)
# plt.xlabel('grad vort')
# plt.ylabel('vort flux (at grad n=0)')
# for ens in enslist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*ens,np.ones(1001),np.zeros(1001),[i/100. for i in range(-500,501)],np.zeros(1001)])
# for i in range(0,n_models):
# model = load_model("vort_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(ens)
# plt.legend(['ens='+str(ens) for ens in enslist])
#
# plt.show()
#vort flux---dependence on both gradients
# gradlist=(-4,-3,-2,-1, 0,1,2,3,4)
#
# plt.xlabel('grad vort')
# plt.ylabel('vort flux')
# for grad in gradlist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*10,2*np.ones(1001), np.ones(1001)*grad,[i/100. for i in range(-500,501)],np.zeros(1001)])
# for i in range(0,n_models):
# model = load_model("vort_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(grad)
# plt.legend(['grad n='+str(grad) for grad in gradlist])
# plt.show()
#off-diagonal component of vorticity flux
# enslist=(0.1, 5,10,15,20)
# plt.xlabel('grad n')
# plt.ylabel('vort flux (at grad vort=0)')
# for ens in enslist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*ens,np.ones(1001),[i/100. for i in range(-500,501)],np.zeros(1001),np.zeros(1001)])
# for i in range(5,):#range(0,n_models):
# model = load_model("vort_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# #average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(ens)
# plt.legend(['ens='+str(ens) for ens in enslist])
#
# plt.show()
#enstrophy flux dependence on enstrophy
# enslist=(0.1,5, 10, 15, 20)
#
# plt.xlabel('grad ens')
# plt.ylabel('ens flux')
# for ens in enslist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*ens,np.zeros(1001),np.zeros(1001),np.zeros(1001),[i/100. for i in range(-500,501)]])
# for i in range(0,n_models):
# model = load_model("ens_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(ens)
# plt.legend(['ens='+str(ens) for ens in enslist])
# plt.show()
#enstrophy flux dependence on vorticity
# vortlist=(-2,-1,0,1,2)
#
# plt.xlabel('grad ens')
# plt.ylabel('ens flux')
# for vort in vortlist:
# average_flux=np.zeros((1001,))
# predict_data=np.transpose([np.ones(1001)*10,vort*np.ones(1001),np.zeros(1001),np.zeros(1001),[i/100. for i in range(-500,501)]])
# for i in range(0,n_models):
# model = load_model("ens_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# average_flux=average_flux/n_models
# plt.plot([i/100. for i in range(-500,501)],average_flux)
# time.sleep(1)
# print(vort)
# plt.legend(['vort='+str(vort) for vort in vortlist])
# plt.show()
# vorticity flux fluctuations
# vortlist=(-2,-1,0,1,2)
#
# plt.xlabel('grad ens')
# plt.ylabel('ens flux')
# for vort in vortlist:
# average_flux=np.zeros((51,))
# predict_data=np.transpose([[i for i in range(0,51)],vort*np.ones(51)])
# for i in range(0,):
# model = load_model("vort_std_"+str(i)+".h5")
# predicted_flux=model.predict(predict_data)
# average_flux=average_flux+predicted_flux
# #average_flux=average_flux/n_models
# plt.plot([i for i in range(0,51)],average_flux)
# time.sleep(1)
# print(vort)
# plt.legend(['vort='+str(vort) for vort in vortlist])
# plt.show()
|
StarcoderdataPython
|
1788792
|
#!/usr/bin/env python3
import json as jsonlib
import requests
import os
import hashlib
import argparse
# SOURCE: https://github.com/AdrianKoshka/flatpak-tools/blob/master/org.mozilla.Thunderbird/genman.py
# Setup arguments to be parsed
parser = argparse.ArgumentParser(description="Auto generates ScarlettOS' flatpak manifest")
parser.add_argument("-r", "--release", help="ScarlettOS release version")
parser.add_argument("-o", "--output", help="File to write to", default="org.scarlett.ScarlettOS.updated.json")
args = parser.parse_args()
# File to output the JSON to
output_file = args.output
# Version of the GNOME runtime to use
gnome_runtime = "3.24"
python_vers = "3.5.2"
# Take the ScarlettOS release version from the '-r' or '--release' argument
release = args.release
# A function which takes a URL, requests the content, and makes a sha256 hash
# of it, and then returns said hash
def hashsrc(url):
print("Getting " + url)
r = requests.get(url)
sha256 = hashlib.sha256()
sha256.update(r.content)
filechecksum = sha256.hexdigest()
return(filechecksum)
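# Illustrative variant (not part of the original script): for large archives it
# can be gentler on memory to hash the download in chunks rather than holding
# the whole response in RAM. This is only a sketch of how that could look with
# requests' streaming API; the manifest below still uses hashsrc() above.
def hashsrc_streaming(url, chunk_size=1 << 16):
    print("Getting " + url)
    sha256 = hashlib.sha256()
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        for chunk in r.iter_content(chunk_size=chunk_size):
            sha256.update(chunk)
    return sha256.hexdigest()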
fin_args = [
# / * Allow access to developer tools * /
"--allow=devel",
"--talk-name=org.freedesktop.Flatpak",
# / * X11 + XShm access * /
"--socket=x11",
"--share=ipc",
# /* OpenGL */
"--device=dri",
# /* Wayland access */
"--socket=wayland",
# /* Audio output */
"--socket=pulseaudio",
# /* We want full fs access */
"--filesystem=host",
"--filesystem=home",
# /* Allow communication with network */
"--share=network",
"--talk-name=org.gtk.vfs.*",
# /* Needed for dconf to work (+ host or homedir read access from above) */
"--filesystem=xdg-run/dconf",
"--filesystem=~/.config/dconf:ro",
"--talk-name=ca.desrt.dconf",
"--env=DCONF_USER_CONFIG_DIR=.config/dconf",
# /* We need access to auth agents */
"--talk-name=org.freedesktop.secrets",
"--filesystem=xdg-run/keyring",
# / * Needed for various SSL certificates to work * /
"--env=SSL_CERT_DIR=/etc/ssl/certs",
"--filesystem=/etc/ssl:ro",
"--filesystem=/etc/pki:ro",
"--filesystem=/etc/ca-certificates:ro",
# / * Keep system terminal mappings * /
"--filesystem=/etc/inputrc:ro",
# / * Chromium uses a socket in tmp for its singleton check * /
"--filesystem=/tmp",
"--own-name=org.scarlett.Listener",
"--own-name=org.scarlett.Listener.*",
# Bunch of dbus session bus stuff
"--talk-name=org.freedesktop.DBus.Proprieties",
"--talk-name=org.freedesktop.IBus",
"--talk-name=org.freedesktop.Notifications",
# Applications sometimes need to interact with the desktop's file manager.
# SOURCE: https://www.freedesktop.org/wiki/Specifications/file-manager-interface/
"--talk-name=org.freedesktop.FileManager1",
# Gnome settings daemon
"--talk-name=org.gnome.SettingsDaemon.Color",
"--talk-name=org.freedesktop.PackageKit",
# Ability to talk to polkit
"--system-talk-name=org.freedesktop.PolicyKit1",
# Sysprof kernel based performance profiler for Linux
# SOURCE: https://github.com/GNOME/sysprof
"--system-talk-name=org.gnome.Sysprof2",
# gnome-code-assistance is a project which aims to provide common code assistance
# services for code editors(simple editors as well as IDEs). It is an effort to
# provide a centralized code - assistance as a service for the GNOME platform
# instead of having every editor implement their own solution.
# SOURCE: https://github.com/GNOME/gnome-code-assistance
"--talk-name=org.gnome.CodeAssist.v1.*",
"--system-talk-name=org.freedesktop.login1",
"--socket=session-bus",
"--system-talk-name=org.freedesktop.Avahi",
"--filesystem=~/.local/share/flatpak",
"--filesystem=/var/lib/flatpak",
"--filesystem=xdg-data/meson"
]
# # Define the finish-args
# fin_args = [
# "--share=ipc",
# "--socket=x11",
# "--device=dri",
# "--share=network",
# "--socket=pulseaudio",
# "--filesystem=~/.cache/thunderbird:create",
# "--filesystem=~/.thunderbird:create",
# "--filesystem=home:ro",
# "--filesystem=xdg-download:rw",
# "--filesystem=xdg-run/dconf",
# "--filesystem=~/.config/dconf:ro",
# "--talk-name=ca.desrt.dconf",
# "--env=DCONF_USER_CONFIG_DIR=.config/dconf",
# "--talk-name=org.a11y.*",
# "--talk-name=org.freedesktop.Notifications"
# ]
# Define the files/directories to cleanup
# clnup = [
# "/include",
# "/lib/pkgconfig",
# "/share/pkgconfig",
# "/share/aclocal",
# "/man",
# "/share/man",
# "*.la",
# "*.a"
# ]
clnup = []
# Define the structure for the build-options section
build_opts = {}
build_opts["cflags"] = "-O2 -g"
build_opts["cxxflags"] = "-O2 -g"
build_opts["env"] = {
"V": "1",
"BASH_COMPLETIONSDIR": "/app/share/bash-completion/completions",
"MOUNT_FUSE_PATH": "../tmp/"
}
# Define the modules section
mdles = []
#########################################################
# Autoconf sources
#########################################################
acsrcs = []
acsrc = {}
acsrc["type"] = "archive"
acsrc["url"] = "http://ftp.gnu.org/gnu/autoconf/autoconf-2.13.tar.gz"
acsrc["sha256"] = hashsrc(acsrc["url"])
acsrcs.append(acsrc)
# Autoconf module
ac = {}
ac["name"] = "autoconf-2.13"
# ac["cleanup"] = ["*"]
ac["sources"] = acsrcs
ac["post-install"] = ["ln -s /app/bin/autoconf /app/bin/autoconf-2.13"]
mdles.append(ac)
#########################################################
#########################################################
# icu sources
#########################################################
icusrcs = []
icusrc = {}
icusrc["type"] = "archive"
icusrc["url"] = "http://download.icu-project.org/files/icu4c/60.1/icu4c-60_1-src.tgz"
icusrc["sha256"] = hashsrc(icusrc["url"])
icusrcs.append(icusrc)
# icu module
icu = {}
icu["name"] = "icu"
icu["cleanup"] = ["/bin/*", "/sbin/*"]
icu["sources"] = icusrc
icu["subdir"] = {"subdir": "source"}
mdles.append(icu)
#########################################################
#########################################################
# cpython sources
#########################################################
cpython_sources = []
cpythonsrc = {}
cpythonsrc["type"] = "archive"
cpythonsrc["url"] = "https://www.python.org/ftp/python/${python_vers}/Python-${python_vers}.tar.xz".format(python_vers=python_vers)
cpythonsrc["sha256"] = hashsrc(cpythonsrc["url"])
cpython_sources.append(cpythonsrc)
# pip source
pipsrc = {}
pipsrc["type"] = "file"
pipsrc["url"] = "https://files.pythonhosted.org/packages/ae/e8/2340d46ecadb1692a1e455f13f75e596d4eab3d11a57446f08259dee8f02/pip-10.0.1.tar.gz"
pipsrc["sha256"] = hashsrc(pipsrc["url"])
cpython_sources.append(pipsrc)
#########################################################
#########################################################
# setuptools source
#########################################################
stsrc = {}
stsrc["type"] = "file"
stsrc["url"] = "https://files.pythonhosted.org/packages/a6/5b/f399fcffb9128d642387133dc3aa9bb81f127b949cd4d9f63e5602ad1d71/setuptools-39.1.0.zip"
stsrc["sha256"] = hashsrc(stsrc["url"])
cpython_sources.append(stsrc)
# wheel source
wheelsrc = {}
wheelsrc["type"] = "file"
wheelsrc["url"] = "https://files.pythonhosted.org/packages/5d/c1/45947333669b31bc6b4933308dd07c2aa2fedcec0a95b14eedae993bd449/wheel-0.31.0.tar.gz"
wheelsrc["sha256"] = hashsrc(wheelsrc["url"])
cpython_sources.append(wheelsrc)
# cpython module
cpython = {}
cpython["name"] = "cpython"
# cpython["cleanup"] = ["*"]
cpython["sources"] = cpython_sources
cpython["post-install"] = [
"ls -lta `pwd`",
"/app/bin/python3 -m pip install --no-index --find-links=\"file://${PWD}\" --prefix=${FLATPAK_DEST} pip setuptools wheel"
]
cpython["build-options"] = {
"build-args": [
"--share=network",
"--allow=devel"
],
"config-opts": [
"--with-pydebug"
],
"cflags": "-O0 -g",
"cxxflags": "-O0 -g",
"strip": False,
"no-debuginfo": False
}
mdles.append(cpython)
#########################################################
# Thunderbird build-options
# app_data_and_icons_bopt = {}
# app_data_and_icons_bopt["clfags"] = "-fno-delete-null-pointer-checks -fno-lifetime-dse -fno-schedule-insns2"
# app_data_and_icons_bopt["cxxflags"] = "-fno-delete-null-pointer-checks -fno-lifetime-dse -fno-schedule-insns2"
# app_data_and_icons_bopt["env"] = {"VERSION": release}
# appdata_and_icons build-commands
app_data_and_icons_bc = [
"ls -lta",
"env",
"mkdir -p /app/share/metainfo/",
"mkdir -p /app/share/appdata/",
"mkdir -p /app/share/applications/",
"mkdir -p /app/cache/scarlett/",
"mkdir -p /app/share/icons/hicolor/64x64/apps/",
"cp org.scarlett.ScarlettOS.appdata.xml /app/share/metainfo/org.scarlett.ScarlettOS.appdata.xml",
"cp scarlettOS.png /app/share/icons/hicolor/64x64/apps/org.scarlett.ScarlettOS.png",
"cp org.scarlett.ScarlettOS.desktop /app/share/applications/org.scarlett.ScarlettOS.desktop"
]
# appdata_and_icons sources
appdata_and_icons_src = []
# icons install script source
icons_install_src = {}
icons_install_src["type"] = "file"
icons_install_src["path"] = "../../data/icons-install.sh"
icons_install_src["dest-filename"] = "icons-install.sh"
appdata_and_icons_src.append(icons_install_src)
# desktop-file install script source
dsk_install_shell_script = {}
dsk_install_shell_script["type"] = "file"
dsk_install_shell_script["path"] = "../../data/install-desktop-file.sh"
dsk_install_shell_script["dest-filename"] = "install-desktop-file.sh"
appdata_and_icons_src.append(dsk_install_shell_script)
# application icon (PNG) source
png_data = {}
png_data["type"] = "file"
png_data["path"] = "../../data/icons/64x64/scarlettOS.png"
appdata_and_icons_src.append(png_data)
# desktop entry source
desktop_data = {}
desktop_data["type"] = "file"
desktop_data["path"] = "../../data/ScarlettOS.desktop"
desktop_data["dest-filename"] = "org.scarlett.ScarlettOS.desktop"
appdata_and_icons_src.append(desktop_data)
appdata_xml = {}
appdata_xml["type"] = "file"
appdata_xml["path"] = "../../data/ScarlettOS.appdata.xml"
appdata_xml["dest-filename"] = "org.scarlett.ScarlettOS.appdata.xml"
appdata_and_icons_src.append(appdata_xml)
# URL formation
# burl = "https://ftp.mozilla.org/pub/thunderbird/releases/"
# srcdir = "/source/"
# srctar = "thunderbird-" + release + ".source.tar.xz"
# full_url = burl + release + srcdir + srctar
# Thunderbird source tar
# tbtarsrc = {}
# tbtarsrc["type"] = "archive"
# tbtarsrc["url"] = full_url
# tbtarsrc["sha256"] = hashsrc(tbtarsrc["url"])
# appdata_and_icons_src.append(tbtarsrc)
# appdata_and_icons module
appdata_and_icons_module = {}
appdata_and_icons_module["name"] = "appdata_and_icons"
appdata_and_icons_module["buildsystem"] = "simple"
# appdata_and_icons_module["build-options"] = app_data_and_icons_bopt
appdata_and_icons_module["build-commands"] = app_data_and_icons_bc
appdata_and_icons_module["sources"] = appdata_and_icons_src
mdles.append(appdata_and_icons_module)
# Define the basic structure
base = {}
base["app-id"] = "org.scarlett.ScarlettOS"
base["runtime"] = gnome_runtime
base["sdk"] = "org.gnome.Sdk"
base["command"] = "/usr/bin/bash"
base["tags"] = ["nightly"]
base["finish-args"] = fin_args
base["build-options"] = build_opts
base["cleanup"] = clnup
base["modules"] = mdles
base["desktop-file-name-prefix"] = "(Nightly) "
base["copy-icon"] = True
json_data = jsonlib.dumps(base, indent=4, default=str)
# Spit out the JSON
with open(output_file, 'w') as f:
f.write(json_data)
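# Example invocation (illustrative; the actual script filename may differ):
#   python genman.py -r 0.5.1
# writes the manifest to the default org.scarlett.ScarlettOS.updated.json, while
#   python genman.py -r 0.5.1 -o my-manifest.json
# writes it to an explicit output path.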
|
StarcoderdataPython
|
3527353
|
<gh_stars>0
#!/usr/bin/env python
# coding: utf-8
import meshio
import pygmsh
import numpy as np
import copy
import glob
from collections import Counter
import os
import sys
import json
import shutil
import scipy.optimize as opt
from EnergyMinimization import *
import numba
# which line of input file defines me?
line=int(sys.argv[1])
# read in arguments from file
reader=open("Parameters.txt","r")
parameters=reader.readlines()[line].split()
# Target mesh size:
target_a = 0.2
# continuum shear modulus:
mu=1
# Energetic penalty for volume change
#B=1000000
B=50000
# Surface Constraint Energy
E=100
# The Material Nonlinearity parameter, between 0 and 1
MatNon=float(parameters[0])
axis=int(parameters[1])
khook = mu
# root folder for data
DataFolder='/mnt/jacb23-XDrive/Physics/ResearchProjects/ASouslov/RC-PH1229/ActiveElastocapillarity/2020-11-18-ModuliiCalibration/'+"alpha_"+"{0:0.2f}".format(MatNon)+"axis_"+"{0:d}".format(axis)+"/"
# Name of the current file
ScriptName="ElasticModuliiCalibrationCluster.py"
try:
os.mkdir(DataFolder)
except OSError:
print ("Creation of the directory %s failed" % DataFolder)
else:
print ("Successfully created the directory %s " % DataFolder)
# try and clear out the folder of vtk files and log files, if there was a previous run in it
for filename in glob.glob(DataFolder+'*.vtk')+glob.glob(DataFolder+'*.log'):
file_path = os.path.join(DataFolder, filename)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
print('Failed to delete %s. Reason: %s' % (file_path, e))
#Dump all the parameters to a file in the run folder
f=open(DataFolder+"Parameters.log","w+")
datadict= {
"a":target_a,
"B":B,
"mu":mu,
"alpha": MatNon
}
json.dump(datadict,f)
f.close()
# and for good measure, dump a copy of this code into the data file too
shutil.copyfile(ScriptName,DataFolder+ScriptName)
with pygmsh.occ.Geometry() as geom:
geom.characteristic_length_max = target_a
ellipsoid = geom.add_ball([0.0, 0.0, 0.0], 1)
InputMesh = geom.generate_mesh()
interiorbonds,edgebonds,boundarytris, bidxTotidx, tetras= MakeMeshData3D(InputMesh)
bonds=np.concatenate((interiorbonds,edgebonds))
orientedboundarytris=OrientTriangles(InputMesh.points,boundarytris,np.array([0,0,0]))
boundarytris=orientedboundarytris
BoundaryPoints= np.unique(edgebonds.ravel())
cells=[ ("line", bonds ), ("triangle",boundarytris ), ("tetra",tetras)]
isbond= np.ones(len(bonds))
isedgebond= np.concatenate( ( np.zeros(len(interiorbonds)),np.ones(len(edgebonds)) ) )
CellDataDict={'isedgebond':[isedgebond,np.zeros(len(boundarytris)),np.zeros(len(tetras))]
,'isbond':[isbond,np.zeros(len(boundarytris)),np.zeros(len(tetras))]}
OutputMesh=meshio.Mesh(InputMesh.points, cells, {},CellDataDict)
OutputMesh.write(DataFolder+"InitialMesh.vtk",binary=True)
# make the preferred rest lengths of the interior springs
interiorpairs=InputMesh.points[interiorbonds]
interiorvecs = np.subtract(interiorpairs[:,0,:],interiorpairs[:,1,:])
InteriorBondRestLengths=np.linalg.norm(interiorvecs,axis=1)
# make the preferred rest lengths of the edge springs. Initially set them at g0=1, but then
#update them in the loop
edgepairs=InputMesh.points[edgebonds]
edgevecs = np.subtract(edgepairs[:,0,:],edgepairs[:,1,:])
EdgeBondRestLengths=np.linalg.norm(edgevecs,axis=1)
r0_ij=np.concatenate((InteriorBondRestLengths,EdgeBondRestLengths))
# The volume constraint is simply that the target volume should be the initial volume
TargetVolumes=Volume3D_tetras(InputMesh.points,tetras)
for mode in ("Compression","Extension"):
Pout_ij =InputMesh.points
if mode=="Extension":
z0range=np.arange(1,1.6,0.05)
elif mode=="Compression":
z0range=np.arange(1,0.4,-0.05)
for z0 in z0range:
print("Current z0"+"{0:0.3f}".format(z0))
if axis==0:
lam=np.array([z0,1/np.sqrt(z0),1/np.sqrt(z0)])
elif axis==1:
lam=np.array([1/np.sqrt(z0),z0,1/np.sqrt(z0)])
elif axis==2:
lam=np.array([1/np.sqrt(z0),1/np.sqrt(z0),z0])
# minimize
history=[]
#def ModuliiEnergyEllipse(P,bondlist,tetras,r0_ij,khook,B,MatNon,TargetVolumes,lam,E,InputMesh,BoundaryPoints):
Pout_ij = opt.minimize(ModuliiEnergyEllipse, Pout_ij.ravel()
#,callback=mycallback
,options={'gtol':1e-03,'disp': True}
,args=(bonds
,tetras
,r0_ij
,khook
,B
,MatNon
,TargetVolumes
,lam
,E
,InputMesh.points
,BoundaryPoints)
).x.reshape((-1, 3))
Name="z0_"+"{0:0.3f}".format(z0)+".vtk"
#CalibrationOutput3D(Name,DataFolder,OutputMesh,P_ij,bondlist,orientedboundarytris,tetras,r0_ij,khook,B,MatNon,TargetVolumes,TopLayer=None,BottomLayer=None,z0=None,E=None,Fz=None,BoundaryPoints=None,InputMeshPoints=None):
CalibrationOutput3D(Name
,DataFolder= DataFolder
,OutputMesh=OutputMesh
,P_ij=Pout_ij
,bondlist=bonds
,orientedboundarytris=orientedboundarytris
,tetras=tetras
,r0_ij=r0_ij
,khook=khook
,B=B
,MatNon=MatNon
,TargetVolumes=TargetVolumes
,z0=z0
,lam=lam
,E=E
,BoundaryPoints=BoundaryPoints
,InputMeshPoints=InputMesh.points)
|
StarcoderdataPython
|
6557526
|
<filename>ode/adams_moulton_method.py<gh_stars>0
'''
Implements Adams-Moulton Method
'''
|
StarcoderdataPython
|
8142625
|
from letter_state import LetterState
class Pardle:
MAX_ATTEMPTS = 6
WORD_LENGTH = 5
def __init__(self, secret: str):
self.secret: str = secret.upper()
self.attempts = []
def attempt(self, word: str):
word = word.upper()
self.attempts.append(word)
def guess(self, word: str):
word = word.upper()
result = []
        ## TODO: Debug the case where there are two identical letters ##
for i in range(self.WORD_LENGTH):
character = word[i]
letter = LetterState(character)
letter.is_in_word = character in self.secret
letter.is_in_position = character ==self.secret[i]
result.append(letter)
return result
@property
def is_solved(self):
return len(self.attempts)>0 and self.attempts[-1] == self.secret
@property
def remaining_attempts(self) -> int:
return self.MAX_ATTEMPTS - len(self.attempts)
@property
def can_attempt(self):
return (self.remaining_attempts > 0) and not (self.is_solved)
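# Minimal usage sketch (illustrative, not part of the original module). It only
# exercises the attempt/guess flow; the word "ALOFT" and secret "APPLE" are
# arbitrary examples, and LetterState is assumed to expose the is_in_word /
# is_in_position flags that guess() sets above.
def _pardle_demo():
    game = Pardle("APPLE")
    word = "ALOFT"  # in a real game this would come from user input
    if game.can_attempt:
        game.attempt(word)
        for ch, letter in zip(word, game.guess(word)):
            print(ch, letter.is_in_word, letter.is_in_position)
        print("solved:", game.is_solved, "remaining:", game.remaining_attempts)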
|
StarcoderdataPython
|
4845327
|
import re
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, FormRequest
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from productloader import load_product
from product_spiders.items import ProductLoader, Product
class gardenlinesSpider(BaseSpider):
name = "gardenlines.co.uk"
allowed_domains = ["www.gardenlines.co.uk"]
start_urls = ("http://www.gardenlines.co.uk/",)
def parse(self, response):
base_url = get_base_url(response)
hxs = HtmlXPathSelector(response)
content = hxs.select("//div[@class='Menu']/ul/li")
items = content.select(".//a/@href").extract()
for item in items:
yield Request(urljoin_rfc(base_url,item), callback=self.parse_subcat)
def parse_subcat(self,response):
hxs = HtmlXPathSelector(response)
base_url = get_base_url(response)
content = hxs.select("//div[@class='Menu']/ul/li/ul[@class='SubMenu']/li")
items = content.select(".//a/@href").extract()
for item in items:
yield Request(urljoin_rfc(base_url,item), callback=self.parse_items)
def parse_items(self,response):
hxs = HtmlXPathSelector(response)
base_url = get_base_url(response)
items = hxs.select("//div[@class='Content']/div/div/div/div/div[@class='MoreInfo']/@onclick").re(r'\'(.*)\'')
for item in items:
yield Request(urljoin_rfc(base_url,item), callback=self.parse_item)
def parse_item(self, response):
hxs = HtmlXPathSelector(response)
name = hxs.select("//div[@class='Content']/div/h1/text()").re(r'([a-zA-Z0-9\-\_\.\(\)\&\#\%\@\!\*][a-zA-Z0-9\-\_\.\(\)\&\#\%\@\!\* ]+)')
url = response.url
price = hxs.select("//div[@class='Content']/div/div//h5/text()").re(r'\xa3([\.0-9,]*)')
l = ProductLoader(item=Product(), response=response)
l.add_value('name', name)
l.add_value('url', url)
l.add_value('price', price)
yield l.load_item()
|
StarcoderdataPython
|
8087514
|
from ceci.main import run
from parsl import clear
import tempfile
import os
import pytest
import subprocess
from ceci.pipeline import Pipeline
from ceci_example.example_stages import *
from ceci.config import StageConfig
def test_config():
config_options = {'chunk_rows': 5000, 'something':float, 'free':None}
config = StageConfig(**config_options)
assert config['chunk_rows'] == 5000
assert config.chunk_rows == 5000
assert getattr(config, 'chunk_rows') == 5000
config.chunk_rows = 133
assert config.chunk_rows == 133
config.free = 'dog'
config.free = 42
try:
config.chunk_rows = 'a'
except TypeError:
pass
else:
raise RuntimeError("Failed to catch a type error")
try:
config['chunk_rows'] = 'a'
except TypeError:
pass
else:
raise RuntimeError("Failed to catch a type error")
assert config.chunk_rows == 133
config['new_par'] = 'abc'
assert config['new_par'] == 'abc'
assert config.get_type('new_par') == str
config.reset()
assert config.chunk_rows == 5000
assert config.get_type('chunk_rows') == int
values = config.values()
for key, value in config.items():
#assert value == config[key].value
assert value in values
def check_func(cfg, **kwargs):
for k, v in kwargs.items():
check_type = cfg.get_type(k)
if k is not None and v is not None:
assert check_type == type(v)
check_func(config, **config)
for k in iter(config):
assert k in config
def test_interactive_pipeline():
# Load the pipeline interactively, this is just a temp fix to
# get the run_config and stage_config
pipeline = Pipeline.read('tests/test.yml')
# pipeline.run()
dry_pipe = Pipeline.read('tests/test.yml', dry_run=True)
dry_pipe.run()
pipe2 = Pipeline.interactive()
overall_inputs = {'DM':'./tests/inputs/dm.txt',
'fiducial_cosmology':'./tests/inputs/fiducial_cosmology.txt'}
inputs = overall_inputs.copy()
inputs['metacalibration'] = True
inputs['config'] = None
pipe2.pipeline_files.update(**inputs)
pipe2.build_stage(PZEstimationPipe)
pipe2.build_stage(shearMeasurementPipe, apply_flag=False)
pipe2.build_stage(WLGCSelector, zbin_edges=[0.2, 0.3, 0.5], ra_range=[-5, 5])
pipe2.build_stage(SysMapMaker)
pipe2.build_stage(SourceSummarizer)
pipe2.build_stage(WLGCCov, aliases= dict(covariance='covariance_copy'))
pipe2.build_stage(WLGCRandoms)
pipe2.build_stage(WLGCTwoPoint)
pipe2.build_stage(WLGCSummaryStatistic, aliases=dict(covariance='covariance_copy'))
assert len(pipe2.WLGCCov.outputs) == 1
pipe2.initialize(overall_inputs, pipeline.run_config, pipeline.stages_config)
pipe2.print_stages()
pipe2.WLGCCov.print_io()
assert pipe2['WLGCCov'] == pipe2.WLGCCov
rpr = repr(pipe2.WLGCCov.config)
path = pipe2.pipeline_files.get_path('covariance_copy')
assert pipe2.pipeline_files.get_tag(path) == 'covariance_copy'
assert pipe2.pipeline_files.get_type('covariance_copy') == pipe2.WLGCCov.get_output_type('covariance')
pipe2.run()
def test_inter_pipe():
pipe2 = Pipeline.interactive()
overall_inputs = {'DM':'./tests/inputs/dm.txt',
'fiducial_cosmology':'./tests/inputs/fiducial_cosmology.txt'}
inputs = overall_inputs.copy()
inputs['config'] = None
pipe2.pipeline_files.update(**inputs)
pipe2.build_stage(PZEstimationPipe, name='bob')
assert isinstance(pipe2.bob, PZEstimationPipe)
pipe2.remove_stage('bob')
assert not hasattr(pipe2, 'bob')
if __name__ == "__main__":
test_config()
    test_interactive_pipeline()
|
StarcoderdataPython
|
3349483
|
import jax.numpy as jnp
from rA9.networks.module import Module
from .img2col import *
from .LIF_recall import *
class pool2d(Module):
def __init__(self, input, kernel_size, stride, tau, vth, dt, v_current):
super(pool2d, self).__init__()
self.input = input
self.kernel_size = kernel_size
self.stride = stride
self.tau = tau
self.vth = vth
self.dt = dt
self.v_current = v_current
self.spike_list, self.v_current = LIF_recall(tau=self.tau, Vth=self.vth, dt=self.dt, x=self.input,
v_current=self.v_current) # needto fix
def forward(self, input, kernel_size):
        def jnp_fn(input_jnp, kernel_size, spike_list):
            # spike_list is carried along with the arguments, but the pooling
            # itself only needs the activations and the window size
            return _pool_forward(input_jnp, kernel_size)
self.jnp_args = (input, kernel_size, self.spike_list)
out = jnp_fn(*self.jnp_args)
return out
def backward(self, grad_outputs, e_grad, timestep):
LIF_backward(self.tau, self.vth, grad_outputs,
spike_list=self.spike_list, e_grad=e_grad, time=timestep)
def _pool_forward(X, size=2, stride=2):
n, d, h, w = X.shape
h_out = (h - size) / stride + 1
w_out = (w - size) / stride + 1
if not w_out.is_integer() or not h_out.is_integer():
raise Exception('Invalid output dimension!')
h_out, w_out = int(h_out), int(w_out)
X_reshaped = X.reshape(n * d, 1, h, w)
X_col = im2col_indices(X_reshaped, size, size, padding=0, stride=stride)
    # standard im2col max pooling: take the largest entry in each window
    # (the original mean/sum expression produced a scalar and could not be
    # used as an index)
    max_idx = jnp.argmax(X_col, axis=0)
    out = X_col[max_idx, jnp.arange(max_idx.size)]
out = out.reshape(h_out, w_out, n, d)
out = jnp.transpose(out, (2, 3, 0, 1))
return out
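# Illustrative check (not part of the original module): running the pooling
# forward pass on a tiny (n, d, h, w) input. This assumes im2col_indices from
# .img2col accepts the array as given; a 4x4 map pooled with size=2, stride=2
# should come out as one 2x2 map per channel.
def _pool_forward_demo():
    x = jnp.arange(16, dtype=jnp.float32).reshape(1, 1, 4, 4)
    out = _pool_forward(x, size=2, stride=2)
    print(out.shape)  # expected: (1, 1, 2, 2)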
|
StarcoderdataPython
|
11366725
|
from hetdesrun.component.registration import register
from hetdesrun.datatypes import DataType
def volatility(series, freq, stamped="right"):
"""A simple volatility measurement
Tries to measure volatility in a time series. Works by comparing sum of absolute
differences to the absolute value of the sum of differences on moving windows.
series: (Pandas.Series): Should have a datetime index and float values. The series of
which volatility is measured.
freq (String): Something like "2h" or "75min". Determines the rolling window size.
stamped (String): One of "left" or "right" or "center". Determines whether the resulting volatility
Series is timestamped left or right of the intervals. "center" only works if freq
is explicitely divisible by 2 (i.e. freq is something like "2h" or "4min"). You can make
center work on a frequency of "1h" by switching to "60min" instead!
Returns: A series of volatility "scores"
"""
diffs = series.sort_index().diff(1)
vols = diffs.abs().rolling(freq).sum() - diffs.rolling(freq).sum().abs()
vols.name = "volatilities"
if stamped == "left":
return vols.shift(freq=freq, periods=-1)
elif stamped == "right":
return vols
elif stamped == "center":
return vols.shift(
freq=freq, periods=-0.5
) # only works if freq is "divisible by 2"
else:
raise ValueError(
"Only 'left' or 'right' or 'center' allowed for stamping parameter."
)
# ***** DO NOT EDIT LINES BELOW *****
# These lines may be overwritten if input/output changes.
@register(
inputs={
"timeseries": DataType.Series,
"window_size": DataType.String,
"window_timestamp_location": DataType.String,
},
outputs={"volatilities": DataType.Series},
)
def main(*, timeseries, window_size, window_timestamp_location):
"""entrypoint function for this component"""
# ***** DO NOT EDIT LINES ABOVE *****
# write your function code here.
return {
"volatilities": volatility(timeseries, window_size, window_timestamp_location)
}
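# Illustrative usage sketch (not part of the component code): calling the
# volatility helper directly on a small synthetic series. pandas is assumed to
# be available, as the component already relies on it for Series inputs.
def _volatility_demo():
    import pandas as pd
    idx = pd.date_range("2020-01-01", periods=6, freq="30min")
    series = pd.Series([1.0, 1.5, 1.2, 1.8, 1.1, 1.4], index=idx)
    print(volatility(series, "2h", stamped="right"))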
|
StarcoderdataPython
|
6590402
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import os
from azure.identity import DefaultAzureCredential
from azure.mgmt.resource import ResourceManagementClient
def main():
SUBSCRIPTION_ID = os.environ.get("SUBSCRIPTION_ID", None)
GROUP_NAME = "testgroupx"
RESOURCE_NAME = "pytestresource"
# Create client
# For other authentication approaches, please see: https://pypi.org/project/azure-identity/
resource_client = ResourceManagementClient(
credential=DefaultAzureCredential(),
subscription_id=SUBSCRIPTION_ID
)
# Create resource group
resource_client.resource_groups.create_or_update(
GROUP_NAME,
{"location": "eastus"}
)
# Check resource existence
check_result = resource_client.resources.check_existence(
resource_group_name=GROUP_NAME,
resource_provider_namespace="Microsoft.Compute",
parent_resource_path="",
resource_type="availabilitySets",
resource_name=RESOURCE_NAME,
api_version="2019-10-01"
)
print("Check resource existence:\n{}".format(check_result))
# Create resource
resource = resource_client.resources.begin_create_or_update(
resource_group_name=GROUP_NAME,
resource_provider_namespace="Microsoft.Compute",
parent_resource_path="",
resource_type="availabilitySets",
resource_name=RESOURCE_NAME,
parameters={'location': "eastus"},
api_version="2019-07-01"
).result()
print("Create resource:\n{}".format(resource))
# Get resource
resource = resource_client.resources.get(
resource_group_name=GROUP_NAME,
resource_provider_namespace="Microsoft.Compute",
parent_resource_path="",
resource_type="availabilitySets",
resource_name=RESOURCE_NAME,
api_version="2019-07-01"
)
print("Get resource:\n{}".format(resource))
# Update resource
resource = resource_client.resources.begin_update(
resource_group_name=GROUP_NAME,
resource_provider_namespace="Microsoft.Compute",
parent_resource_path="",
resource_type="availabilitySets",
resource_name=RESOURCE_NAME,
parameters={'tags': {"tag1": "value1"}},
api_version="2019-07-01"
).result()
print("Update resource:\n{}".format(resource))
# Delete resource
resource_client.resources.begin_delete(
resource_group_name=GROUP_NAME,
resource_provider_namespace="Microsoft.Compute",
parent_resource_path="",
resource_type="availabilitySets",
resource_name=RESOURCE_NAME,
api_version="2019-07-01"
).result()
print("Delete resource.\n")
# Delete Group
resource_client.resource_groups.begin_delete(
GROUP_NAME
).result()
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
394118
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import os
import tempfile
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
import paddle.fluid.layers as layers
import paddle.fluid.core as core
from paddle.fluid import compiler, Program, program_guard
import paddle.fluid.proto.profiler.profiler_pb2 as profiler_pb2
class TestProfiler(unittest.TestCase):
@classmethod
def setUpClass(cls):
os.environ['CPU_NUM'] = str(4)
def net_profiler(self,
state,
option,
iter_range=None,
use_parallel_executor=False):
profile_path = os.path.join(tempfile.gettempdir(), "profile")
open(profile_path, "w").write("")
startup_program = fluid.Program()
main_program = fluid.Program()
with fluid.program_guard(main_program, startup_program):
image = fluid.layers.data(name='x', shape=[784], dtype='float32')
hidden1 = fluid.layers.fc(input=image, size=64, act='relu')
i = layers.zeros(shape=[1], dtype='int64')
counter = fluid.layers.zeros(
shape=[1], dtype='int64', force_cpu=True)
until = layers.fill_constant([1], dtype='int64', value=10)
data_arr = layers.array_write(hidden1, i)
cond = fluid.layers.less_than(x=counter, y=until)
while_op = fluid.layers.While(cond=cond)
with while_op.block():
hidden_n = fluid.layers.fc(input=hidden1, size=64, act='relu')
layers.array_write(hidden_n, i, data_arr)
fluid.layers.increment(x=counter, value=1, in_place=True)
layers.less_than(x=counter, y=until, cond=cond)
hidden_n = layers.array_read(data_arr, i)
hidden2 = fluid.layers.fc(input=hidden_n, size=64, act='relu')
predict = fluid.layers.fc(input=hidden2, size=10, act='softmax')
label = fluid.layers.data(name='y', shape=[1], dtype='int64')
cost = fluid.layers.cross_entropy(input=predict, label=label)
avg_cost = fluid.layers.mean(cost)
batch_size = fluid.layers.create_tensor(dtype='int64')
batch_acc = fluid.layers.accuracy(
input=predict, label=label, total=batch_size)
optimizer = fluid.optimizer.Momentum(learning_rate=0.001, momentum=0.9)
opts = optimizer.minimize(avg_cost, startup_program=startup_program)
place = fluid.CPUPlace() if state == 'CPU' else fluid.CUDAPlace(0)
exe = fluid.Executor(place)
exe.run(startup_program)
if use_parallel_executor:
pe = fluid.ParallelExecutor(
state != 'CPU',
loss_name=avg_cost.name,
main_program=main_program)
pass_acc_calculator = fluid.average.WeightedAverage()
with profiler.profiler(state, 'total', profile_path, option) as prof:
for iter in range(10):
if iter == 2:
profiler.reset_profiler()
x = np.random.random((32, 784)).astype("float32")
y = np.random.randint(0, 10, (32, 1)).astype("int64")
if use_parallel_executor:
pe.run(feed={'x': x, 'y': y}, fetch_list=[avg_cost.name])
continue
outs = exe.run(main_program,
feed={'x': x,
'y': y},
fetch_list=[avg_cost, batch_acc, batch_size])
acc = np.array(outs[1])
b_size = np.array(outs[2])
pass_acc_calculator.add(value=acc, weight=b_size)
pass_acc = pass_acc_calculator.eval()
data = open(profile_path, 'rb').read()
if (len(data) > 0):
profile_pb = profiler_pb2.Profile()
profile_pb.ParseFromString(data)
self.assertGreater(len(profile_pb.events), 0)
for event in profile_pb.events:
if event.type == profiler_pb2.Event.GPUKernel:
if not event.detail_info and not event.name.startswith(
"MEM"):
raise Exception(
"Kernel %s missing event. Has this kernel been recorded by RecordEvent?"
% event.name)
elif event.type == profiler_pb2.Event.CPU and (
event.name.startswith("Driver API") or
event.name.startswith("Runtime API")):
print("Warning: unregister", event.name)
def test_cpu_profiler(self):
self.net_profiler('CPU', "Default")
self.net_profiler('CPU', "Default", use_parallel_executor=True)
@unittest.skipIf(not core.is_compiled_with_cuda(),
"profiler is enabled only with GPU")
def test_cuda_profiler(self):
self.net_profiler('GPU', "OpDetail")
self.net_profiler('GPU', "OpDetail", use_parallel_executor=True)
@unittest.skipIf(not core.is_compiled_with_cuda(),
"profiler is enabled only with GPU")
def test_all_profiler(self):
self.net_profiler('All', "AllOpDetail")
self.net_profiler('All', "AllOpDetail", use_parallel_executor=True)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
8151279
|
<filename>sols/1189.py
import collections
from collections import Counter
class Solution:
# Counter + Int Div (Accepted), O(n) time and space
def maxNumberOfBalloons(self, text: str) -> int:
txt_c, bal_c = Counter(text), Counter('balloon')
return min(txt_c[c] // bal_c[c] for c in bal_c)
# Counter + Int Div (Top Voted), O(n) time and space
def maxNumberOfBalloons(self, text: str) -> int:
cnt = collections.Counter(text)
cntBalloon = collections.Counter('balloon')
return min([cnt[c] // cntBalloon[c] for c in cntBalloon])
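# Quick illustrative check (not part of the original solution file): the text
# "loonbalxballpoon" contains enough letters for two copies of "balloon".
def _demo():
    assert Solution().maxNumberOfBalloons("loonbalxballpoon") == 2
    assert Solution().maxNumberOfBalloons("leetcode") == 0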
|
StarcoderdataPython
|
3534356
|
#!/usr/bin/env python
#
# Copyright 2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
class test_multiply_conjugate (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_000 (self):
src_data0 = (-2-2j, -1-1j, -2+2j, -1+1j,
2-2j, 1-1j, 2+2j, 1+1j,
0+0j)
src_data1 = (-3-3j, -4-4j, -3+3j, -4+4j,
3-3j, 4-4j, 3+3j, 4+4j,
0+0j)
exp_data = (12+0j, 8+0j, 12+0j, 8+0j,
12+0j, 8+0j, 12+0j, 8+0j,
0+0j)
src0 = blocks.vector_source_c(src_data0)
src1 = blocks.vector_source_c(src_data1)
op = blocks.multiply_conjugate_cc ()
dst = blocks.vector_sink_c ()
self.tb.connect(src0, (op,0))
self.tb.connect(src1, (op,1))
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data ()
self.assertEqual (exp_data, result_data)
if __name__ == '__main__':
gr_unittest.run(test_multiply_conjugate, "test_multiply_conjugate.xml")
|
StarcoderdataPython
|
4996087
|
<filename>src/utils.py
# -*- coding: utf-8 -*-
"""
Utils of the game.
@Author: yanyongyu
"""
__author__ = "yanyongyu"
def getHitmask(image):
"""returns a hitmask using an image's alpha."""
mask = []
for x in range(image.get_width()):
mask.append([])
for y in range(image.get_height()):
mask[x].append(bool(image.get_at((x, y))[3]))
return mask
def pixelCollision(rect1, rect2, hitmask1, hitmask2):
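    """Checks whether two objects collide using their per-pixel hitmasks, not just their rects."""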
rect = rect1.clip(rect2)
if rect.width == 0 or rect.height == 0:
return False
x1, y1 = rect.x - rect1.x, rect.y - rect1.y
x2, y2 = rect.x - rect2.x, rect.y - rect2.y
for x in range(rect.width):
for y in range(rect.height):
if hitmask1[x1 + x][y1 + y] and hitmask2[x2 + x][y2 + y]:
return True
return False
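# Illustrative sketch (not part of the original utils): how the two helpers
# above are typically combined. The surfaces and rects here are stand-ins for
# whatever pygame objects the game actually uses; only the call pattern matters.
def _collision_demo(player_img, player_rect, pipe_img, pipe_rect):
    player_mask = getHitmask(player_img)
    pipe_mask = getHitmask(pipe_img)
    return pixelCollision(player_rect, pipe_rect, player_mask, pipe_mask)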
|
StarcoderdataPython
|
8152417
|
<reponame>jna29/SymGP<gh_stars>1-10
import sys
# Add the symgp folder path to the sys.path list
module_path = r'/Users/jaduol/Documents/Uni (original)/Part II/IIB/MEng Project/'
if module_path not in sys.path:
sys.path.append(module_path)
from symgp import SuperMatSymbol, utils, MVG, Variable, SuperDiagMat, Kernel
from sympy import symbols, ZeroMatrix, Identity
m, n, l = symbols('m n l')
s_y = symbols('\u03c3_y')
K = Kernel()
u = Variable('u',m,1)
p_u = MVG([u],mean=ZeroMatrix(m,1),cov=K(u,u))
print("p_u:\n ", p_u)
f, fs, y = utils.variables("f f_{*} y",[n, l, n])
q_fgu = MVG([f], mean=K(f,u)*K(u,u).I*u,
cov=SuperDiagMat(K(f,f)-K(f,u)*K(u,u).I*K(u,f)),
cond_vars=[u],
prefix='q_{FITC}')
print("q_fgu:\n")
q_fsgu = MVG([fs], mean=K(fs,u)*K(u,u).I*u,
cov=K(fs,fs)-K(fs,u)*K(u,u).I*K(u,fs),
cond_vars=[u],
prefix='q_{FITC}')
print("q_fgu:\n ", q_fsgu)
# q(f,fs|u)
q_f_fs_g_u = q_fgu*q_fsgu
print("q_f_fs_g_u:\n ", q_f_fs_g_u)
# q(f,fs,u)
q_f_fs_u = q_f_fs_g_u*p_u
print("q_f_fs_u:\n ", q_f_fs_u)
# Effective prior: q(f,fs)
q_f_fs = q_f_fs_u.marginalise([u])
print("q_f_fs:\n ", q_f_fs)
p_ygf = MVG([y],mean=f,cov=s_y**2*Identity(n),cond_vars=[f])
print("p_ygf:\n ", p_ygf)
# q(f,fs,y)
q_f_fs_y = p_ygf*q_f_fs
print("q_f_fs_y:\n ", q_f_fs_y)
# q(f,fs|y)
q_f_fs_g_y = q_f_fs_y.condition([y])
print("q_f_fs_g_y:\n ", q_f_fs_g_y)
# q(fs|y)
q_fs_g_y = q_f_fs_g_y.marginalise([f])
print("q_fs_g_y:\n ", q_fs_g_y)
|
StarcoderdataPython
|
3392616
|
<filename>django_blog_comments/models.py
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
# Create your models here.
class Comments(models.Model):
sno = models.AutoField(primary_key=True)
comment_text = models.TextField()
user = models.ForeignKey(User,on_delete=models.CASCADE)
post = models.ForeignKey(settings.POST_MODEL,on_delete=models.CASCADE)
parent = models.ForeignKey('self',on_delete=models.CASCADE,null=True)
timestamp = models.DateTimeField(auto_now_add=True)
def __str__(self):
return f"{self.user} commented '{self.comment_text}'"
Post = settings.POST_MODEL
|
StarcoderdataPython
|
8080757
|
<reponame>BorisYourich/EurOPDX-Galaxy
#! /usr/bin/env python
"""
RSEM Alignment to transcriptome.
Version: 1.3.0
"""
from __future__ import print_function
import sys
import os
import shutil
import argparse
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--seed-length', '--seed-length', default='25',
help="Seed length used by the read aligner. Providing the correct value is important for RSEM. [default: 25]")
parser.add_argument('--forward-prob', '--forward-prob', default='0.5',
help="Probability of generating a read from the forward strand of a transcript. (Default: 0.5)")
parser.add_argument('files', nargs='+',
help="The file[s] to use.")
# parser.add_argument('-reference', '--directory', dest='odir', default='.',
# help=
# 'Reference base file path with reference name e.g. /home/user/hg_38 '
# '[current directory]')
return parser.parse_args()
def main():
# args = parse_args()
sample_type = sys.argv[1]
interim_results_dir = '/galaxy/reference-data/rsem/'
print("[INFO] Sample Type is " + sample_type)
try:
if sample_type == 'single_end':
#sample_name = sys.argv[6]
sample_name = "sample"
rsem_threads = sys.argv[7]
rsem_stat = sys.argv[8]
command = "rsem-calculate-expression -p " + rsem_threads + \
" --phred33-quals --seed-length " + sys.argv[2] + \
" --forward-prob " + sys.argv[3] + \
" --sort-bam-memory-per-thread 2G " \
"--time " \
"--output-genome-bam " \
"--sort-bam-by-coordinate " \
"--bowtie2 " + \
sys.argv[4] + " " + \
interim_results_dir + sys.argv[5] + " " + \
sample_name
else:
#sample_name = sys.argv[7]
sample_name = "sample"
rsem_threads = sys.argv[8]
rsem_stat = sys.argv[9]
command = "rsem-calculate-expression -p " + rsem_threads + \
" --phred33-quals --seed-length " + sys.argv[2] + \
" --forward-prob " + sys.argv[3] + \
" --sort-bam-memory-per-thread 2G " \
"--time " \
"--output-genome-bam " \
"--sort-bam-by-coordinate " \
"--bowtie2 " \
"--paired-end " + \
sys.argv[4] + " " + sys.argv[5] + " " + \
interim_results_dir + sys.argv[6] + " " + \
sample_name
print('[INFO] Command: ' + command)
os.system(command)
except Exception as e:
        print('Error while executing rsem-calculate-expression -> %s : %s' % (command, e))
sys.exit(1)
try:
shutil.copy(sample_name + ".stat/" + sample_name + ".cnt", rsem_stat)
except Exception as e:
print('Error saving the data in galaxy -> %s' % e)
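# Illustrative alternative (not part of the original script, assumes Python 3):
# building the command as an argument list and running it with subprocess.run
# avoids shell string concatenation and raises on a non-zero exit code.
def _run_rsem(command_args):
    import subprocess
    print('[INFO] Command: ' + ' '.join(command_args))
    subprocess.run(command_args, check=True)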
if __name__ == '__main__':
main()
sys.exit(0)
|
StarcoderdataPython
|
11346208
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
import math
BN_MOMENTUM = 0.01
model_urls = {
'mobilenet_v2': 'https://download.pytorch.org/models/mobilenet_v2-b0353104.pth',
}
def conv_bn(inp, oup, stride):
conv_3x3=nn.Sequential(
nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
nn.BatchNorm2d(oup),
nn.ReLU(inplace=True)
)
for m in conv_3x3.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, nn.BatchNorm2d):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
return conv_3x3
def conv_1x1_bn(inp, oup):
conv1x1=nn.Sequential(
nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
nn.ReLU(inplace=True))
for m in conv1x1.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, nn.BatchNorm2d):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
return conv1x1
def deconv_bn_relu(in_channels,out_channels,kernel_size,padding,output_padding,bias):
deconv = nn.Sequential(
nn.ConvTranspose2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
stride=2,
padding=padding,
output_padding=output_padding,
bias=bias),
nn.BatchNorm2d(out_channels, momentum=BN_MOMENTUM),
nn.ReLU(inplace=True)
)
for m in deconv.modules():
if isinstance(m, nn.ConvTranspose2d):
fill_up_weights(m)
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, nn.BatchNorm2d):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
return deconv
def fill_fc_weights(layers):
for m in layers.modules():
if isinstance(m, nn.Conv2d):
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def fill_up_weights(up):
w = up.weight.data
f = math.ceil(w.size(2) / 2)
c = (2 * f - 1 - f % 2) / (2. * f)
for i in range(w.size(2)):
for j in range(w.size(3)):
w[0, 0, i, j] = \
(1 - math.fabs(i / f - c)) * (1 - math.fabs(j / f - c))
for c in range(1, w.size(0)):
w[c, 0, :, :] = w[0, 0, :, :]
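# Illustrative note (not part of the original model code): fill_up_weights
# initialises each deconv channel with a bilinear upsampling kernel. For
# kernel_size=4 the 1-D profile works out to [0.25, 0.75, 0.75, 0.25] and the
# 2-D kernel is its outer product; the sketch below just prints one channel.
def _bilinear_init_demo():
    up = nn.ConvTranspose2d(1, 1, kernel_size=4, stride=2, bias=False)
    fill_up_weights(up)
    print(up.weight.data[0, 0])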
class InvertedResidual(nn.Module):
def __init__(self, inp, oup, stride, expand_ratio):
super(InvertedResidual, self).__init__()
self.stride = stride
assert stride in [1, 2]
hidden_dim = round(inp * expand_ratio)
self.use_res_connect = self.stride == 1 and inp == oup
if expand_ratio == 1:
self.conv = nn.Sequential(
# dw
nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
nn.ReLU(inplace=True),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
else:
self.conv = nn.Sequential(
# pw
nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
nn.BatchNorm2d(hidden_dim),
nn.ReLU(inplace=True),
# dw
nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
nn.ReLU(inplace=True),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
class MobileNetv2Det(nn.Module):
def __init__(self,heads,head_conv,width_mult=1.,is_train=True):
super(MobileNetv2Det, self).__init__()
self.inplanes = 32
self.last_channel=64 #backbone
self.deconv_with_bias = False
self.is_train=is_train
self.heads = heads
block = InvertedResidual
interverted_residual_setting = [
# t, c, n, s
[1, 16, 1, 1],
[6, 24, 2, 2],
[6, 32, 3, 2],
[6, 64, 4, 2],
[6, 96, 3, 1],
[6, 160, 3, 2],
[6, 320, 1, 1],
]
#build backbone
# building first layer
#assert input_size % 32 == 0
input_channel = int(self.inplanes * width_mult)
self.last_channel = int(self.last_channel * width_mult) if width_mult > 1.0 else self.last_channel
self.features = [conv_bn(3, input_channel, 2)]
# building inverted residual blocks
for t, c, n, s in interverted_residual_setting:
output_channel = int(c * width_mult)
for i in range(n):
if i == 0:
self.features.append(block(input_channel, output_channel, s, expand_ratio=t))
else:
self.features.append(block(input_channel, output_channel, 1, expand_ratio=t))
input_channel = output_channel
# make it nn.Sequential
self.features = nn.Sequential(*self.features)
# building last several layers
self.backbone_lastlayer=conv_1x1_bn(input_channel, self.last_channel)
self.ups=[]
for i in range(3):
up=deconv_bn_relu(self.last_channel, self.last_channel, 2, 0, 0, self.deconv_with_bias)
self.ups.append(up)
self.ups=nn.Sequential(*self.ups)
self.conv_dim_matchs=[]
self.conv_dim_matchs.append(conv_1x1_bn(96, self.last_channel))
self.conv_dim_matchs.append(conv_1x1_bn(32, self.last_channel))
self.conv_dim_matchs.append(conv_1x1_bn(24, self.last_channel))
self.conv_dim_matchs=nn.Sequential(*self.conv_dim_matchs)
self.last_context_conv=conv_bn(self.last_channel,self.last_channel,1)
for head in sorted(self.heads):
num_output = self.heads[head]
#original centerface's structure
# fc = nn.Conv2d(
# in_channels=self.last_channel,
# out_channels=num_output,
# kernel_size=1,
# stride=1,
# padding=0
# )
# if 'hm' in head:
# fc.bias.data.fill_(-2.19)
# else:
# fill_fc_weights(fc)
# self.__setattr__(head, fc)
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(self.last_channel, head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, num_output,
kernel_size=1, stride=1, padding=0))
if 'hm' in head:
fc[-1].bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
else:
fc = nn.Conv2d(
in_channels=self.last_channel,
out_channels=num_output,
kernel_size=1,
stride=1,
padding=0
)
if 'hm' in head:
fc.bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
self.__setattr__(head, fc)
def init_weights(self, pretrained=True):
if pretrained:
for head in self.heads:
final_layer = self.__getattr__(head)
for i, m in enumerate(final_layer.modules()):
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.weight.shape[0] == self.heads[head]:
if 'hm' in head:
nn.init.constant_(m.bias, -2.19)
else:
nn.init.normal_(m.weight, std=0.001)
nn.init.constant_(m.bias, 0)
url = model_urls['mobilenet_v2']
pretrained_state_dict = model_zoo.load_url(url)
print('=> loading pretrained model {}'.format(url))
self.features.load_state_dict(pretrained_state_dict, strict=False)
else:
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, nn.BatchNorm2d):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.ConvTranspose2d):
fill_up_weights(m)
if m.bias is not None:
nn.init.zeros_(m.bias)
def forward(self, x):
xs=[]
for n in range(0, 4):
x = self.features[n](x)
xs.append(x)
for n in range(4, 7):
x = self.features[n](x)
xs.append(x)
for n in range(7, 14):
x = self.features[n](x)
xs.append(x)
for n in range(14, 18):
x = self.features[n](x)
x=self.backbone_lastlayer(x)
for i in range(3):
x=self.ups[i](x)
x=x+self.conv_dim_matchs[i](xs[3-i-1])
x=self.last_context_conv(x)
if self.is_train==True:
ret={}
for head in self.heads:
ret[head] = self.__getattr__(head)(x)
return [ret]
else:
ret=[]
for head in self.heads:
ret.append(self.__getattr__(head)(x))
return ret
def get_mv2relu_net(num_layers,heads, head_conv,is_train):
model =MobileNetv2Det(heads, head_conv=head_conv,width_mult=1.0,is_train=is_train)
model.init_weights()
return model
|
StarcoderdataPython
|
9632140
|
<reponame>npurcella/rtwo<gh_stars>1-10
"""
Create a mock driver and attempt to call each mock method that RTwo adds data
to!
"""
import unittest
from mock import Mock, patch
from rtwo.test.secrets import OPENSTACK_PARAMS
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import method_type
from libcloud.utils.py3 import u
from libcloud.common.types import LibcloudError
from libcloud.compute.types import Provider, KeyPairDoesNotExistError
from libcloud.compute.providers import get_driver
from libcloud.compute.drivers.openstack import (
OpenStackSecurityGroup, OpenStackSecurityGroupRule,
OpenStack_1_1_FloatingIpPool, OpenStack_1_1_FloatingIpAddress,
OpenStackKeyPair
)
from libcloud.test.compute.test_openstack import OpenStack_1_1_MockHttp, \
OpenStackMockHttp
from libcloud.test.compute.test_openstack import OpenStack_1_1_Tests
from rtwo.drivers.openstack_facade import OpenStack_Esh_Connection,OpenStack_Esh_NodeDriver
######
class OpenStackEshConnectionTest(unittest.TestCase):
def setUp(self):
self.timeout = 10
OpenStack_Esh_Connection.conn_classes = (None, Mock())
self.connection = OpenStack_Esh_Connection('foo', 'bar',
timeout=self.timeout,
ex_force_auth_url='https://127.0.0.1')
self.connection.driver = Mock()
self.connection.driver.name = 'OpenStackEshDriver'
def test_timeout(self):
self.connection.connect()
self.assertEqual(self.connection.timeout, self.timeout)
self.connection.conn_classes[1].assert_called_with(host='127.0.0.1',
port=443,
timeout=10)
class OpenStackEshDriverTest(OpenStack_1_1_Tests):
driver_args = OPENSTACK_PARAMS
driver_klass = OpenStack_Esh_NodeDriver
driver_type = OpenStack_Esh_NodeDriver
def setUp(self):
super(OpenStackEshDriverTest, self).setUp()
|
StarcoderdataPython
|
3592
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from config import CONFIG
import json
import tensorflow as tf
import numpy as np
import io
import math
import os
import time
from absl import flags
from absl import logging
from easydict import EasyDict
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt  # pylint: disable=g-import-not-at-top
FLAGS = flags.FLAGS
def visualize_batch(data, global_step, batch_size, num_steps):
"""Visualizes a batch."""
frames = data['frames']
frames_list = tf.unstack(frames, num=num_steps, axis=1)
frames_summaries = tf.concat(frames_list, axis=2)
batch_list = tf.split(frames_summaries, batch_size, axis=0)
batch_summaries = tf.concat(batch_list, axis=1)
tf.summary.image('train_batch', batch_summaries, step=global_step)
def visualize_nearest_neighbours(model, data, global_step, batch_size,
num_steps, num_frames_per_step, split):
"""Visualize nearest neighbours in embedding space."""
# Set learning_phase to False to use models in inference mode.
tf.keras.backend.set_learning_phase(0)
cnn = model['cnn']
emb = model['emb']
if 'tcn' in CONFIG.TRAINING_ALGO:
cnn_feats = get_cnn_feats(
cnn, data, training=False, num_steps=2 * num_steps)
emb_feats = emb(cnn_feats, 2 * num_steps)
emb_feats = tf.stack(
tf.split(emb_feats, 2 * num_steps, axis=0)[::2], axis=1)
else:
cnn_feats = get_cnn_feats(cnn, data, training=False)
emb_feats = emb(cnn_feats, num_steps)
emb_feats = tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1)
query_feats = emb_feats[0]
if CONFIG.OPTICALFLOW:
frames = data['video_frames']
else:
frames = data['frames']
image_list = tf.unstack(frames, num=batch_size, axis=0)
if 'tcn' in CONFIG.TRAINING_ALGO:
im_list = [image_list[0]
[num_frames_per_step - 1::num_frames_per_step][::2]]
else:
im_list = [image_list[0][num_frames_per_step - 1::num_frames_per_step]]
sim_matrix = np.zeros(
(batch_size-1, num_steps, num_steps), dtype=np.float32)
for i in range(1, batch_size):
candidate_feats = emb_feats[i]
if 'tcn' in CONFIG.TRAINING_ALGO:
img_list = tf.unstack(image_list[i], num=2 * num_steps * num_frames_per_step,
axis=0)[num_frames_per_step - 1::num_frames_per_step][::2]
else:
img_list = tf.unstack(image_list[i], num=num_steps * num_frames_per_step,
axis=0)[num_frames_per_step - 1::num_frames_per_step]
nn_img_list = []
for j in range(num_steps):
curr_query_feats = tf.tile(query_feats[j:j+1], [num_steps, 1])
mean_squared_distance = tf.reduce_mean(
tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1)
sim_matrix[i-1, j] = softmax(-1.0 * mean_squared_distance)
nn_img_list.append(img_list[tf.argmin(mean_squared_distance)])
nn_img = tf.stack(nn_img_list, axis=0)
im_list.append(nn_img)
def vstack(im):
return tf.concat(tf.unstack(im, num=num_steps), axis=1)
summary_im = tf.expand_dims(tf.concat([vstack(im) for im in im_list],
axis=0), axis=0)
tf.summary.image('%s/nn' % split, summary_im, step=global_step)
# Convert sim_matrix to float32 as summary_image doesn't take float64
sim_matrix = sim_matrix.astype(np.float32)
tf.summary.image('%s/similarity_matrix' % split,
np.expand_dims(sim_matrix, axis=3), step=global_step)
def softmax(w, t=1.0):
e = np.exp(np.array(w) / t)
dist = e / np.sum(e)
return dist
def random_choice_noreplace(m, n, axis=-1):
# Generate m random permuations of range (0, n)
# NumPy version: np.random.rand(m,n).argsort(axis=axis)
return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64)
def gen_cycles(num_cycles, batch_size, cycle_len):
"""Generate cycles for alignment."""
random_cycles = random_choice_noreplace(
num_cycles, batch_size)[:, :cycle_len]
return random_cycles
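# Illustrative example (not part of the original utils): gen_cycles draws, for
# each of num_cycles rows, a random ordering of batch indices and keeps the
# first cycle_len of them, so gen_cycles(2, 4, 3) might return something like
# [[2, 0, 3], [1, 3, 0]].
def _gen_cycles_demo():
    print(gen_cycles(num_cycles=2, batch_size=4, cycle_len=3))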
def get_warmup_lr(lr, global_step, lr_params):
"""Returns learning rate during warm up phase."""
if lr_params.NUM_WARMUP_STEPS > 0:
global_steps_int = tf.cast(global_step, tf.int32)
warmup_steps_int = tf.constant(
lr_params.NUM_WARMUP_STEPS, dtype=tf.int32)
global_steps_float = tf.cast(global_steps_int, tf.float32)
warmup_steps_float = tf.cast(warmup_steps_int, tf.float32)
warmup_percent_done = global_steps_float / warmup_steps_float
warmup_lr = lr_params.INITIAL_LR * warmup_percent_done
is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32)
lr = (1.0 - is_warmup) * lr + is_warmup * warmup_lr
return lr
# Minimally adapted from Tensorflow object_detection code.
def manual_stepping(global_step, boundaries, rates):
boundaries = [0] + boundaries
num_boundaries = len(boundaries)
rate_index = tf.reduce_max(
tf.where(
tf.greater_equal(global_step, boundaries),
list(range(num_boundaries)), [0] * num_boundaries))
return tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries))
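# Illustrative example (not part of the original utils): with boundaries
# [100, 200] and rates [1e-3, 1e-4, 1e-5], manual_stepping is expected to give
# 1e-3 for steps below 100, 1e-4 from 100 up to 199, and 1e-5 afterwards.
def _manual_stepping_demo():
    for step in (0, 50, 100, 150, 250):
        lr = manual_stepping(tf.constant(step), boundaries=[100, 200],
                             rates=[1e-3, 1e-4, 1e-5])
        print(step, float(lr))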
def get_lr_fn(optimizer_config):
"""Returns function that provides current learning rate based on config.
NOTE: This returns a function as in Eager we need to call assign to update
the learning rate.
Args:
optimizer_config: EasyDict, contains params required to initialize the
learning rate and the learning rate decay function.
Returns:
lr_fn: function, this can be called to return the current learning rate
based on the provided config.
Raises:
ValueError: in case invalid params have been passed in the config.
"""
lr_params = optimizer_config.LR
# pylint: disable=g-long-lambda
if lr_params.DECAY_TYPE == 'exp_decay':
def lr_fn(lr, global_step): return tf.train.exponential_decay(
lr,
global_step,
lr_params.EXP_DECAY_STEPS,
lr_params.EXP_DECAY_RATE,
staircase=True)()
elif lr_params.DECAY_TYPE == 'manual':
lr_step_boundaries = [int(x)
for x in lr_params.MANUAL_LR_STEP_BOUNDARIES]
f = lr_params.MANUAL_LR_DECAY_RATE
learning_rate_sequence = [(lr_params.INITIAL_LR) * f**p
for p in range(len(lr_step_boundaries) + 1)]
def lr_fn(lr, global_step): return manual_stepping(
global_step, lr_step_boundaries, learning_rate_sequence)
elif lr_params.DECAY_TYPE == 'fixed':
def lr_fn(lr, global_step): return lr_params.INITIAL_LR
elif lr_params.DECAY_TYPE == 'poly':
def lr_fn(lr, global_step): return tf.train.polynomial_decay(
lr,
global_step,
CONFIG.TRAIN.MAX_ITERS,
end_learning_rate=0.0,
power=1.0,
cycle=False)
  else:
    raise ValueError('Learning rate decay type %s not supported. Only support '
                     'the following decay types: fixed, exp_decay, manual, '
                     'and poly.' % lr_params.DECAY_TYPE)
return (lambda lr, global_step: get_warmup_lr(lr_fn(lr, global_step),
global_step, lr_params))
def get_optimizer(optimizer_config, learning_rate):
"""Returns optimizer based on config and learning rate."""
if optimizer_config.TYPE == 'AdamOptimizer':
opt = tf.keras.optimizers.Adam(learning_rate=learning_rate)
elif optimizer_config.TYPE == 'MomentumOptimizer':
opt = tf.keras.optimizers.SGD(
learning_rate=learning_rate, momentum=0.9)
  else:
    raise ValueError('Optimizer %s not supported. Only support the following '
                     'optimizers: AdamOptimizer, MomentumOptimizer.' %
                     optimizer_config.TYPE)
return opt
def get_lr_opt_global_step():
"""Intializes learning rate, optimizer and global step."""
optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR)
global_step = optimizer.iterations
learning_rate = optimizer.learning_rate
return learning_rate, optimizer, global_step
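# Hedged sketch of how these helpers are typically wired together in an eager
# training loop (not copied from the original training script; per the note in
# get_lr_fn's docstring, the rate is updated via assign):
#   lr, opt, global_step = get_lr_opt_global_step()
#   lr_fn = get_lr_fn(CONFIG.OPTIMIZER)
#   lr.assign(lr_fn(lr, global_step))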
def create_ckpt(logdir, restore=False, **ckpt_objects):
# Since model is a dict we can insert multiple modular networks in this dict.
checkpoint = tf.train.Checkpoint(**ckpt_objects)
ckpt_manager = tf.train.CheckpointManager(
checkpoint,
directory=logdir,
max_to_keep=10,
keep_checkpoint_every_n_hours=1)
status = checkpoint.restore(
ckpt_manager.latest_checkpoint) if restore else -1
return ckpt_manager, status, checkpoint
def restore_ckpt(logdir, **ckpt_objects):
"""Create and restore checkpoint (if one exists on the path)."""
# Instantiate checkpoint and restore from any pre-existing checkpoint.
# Since model is a dict we can insert multiple modular networks in this dict.
checkpoint = tf.train.Checkpoint(**ckpt_objects)
ckpt_manager = tf.train.CheckpointManager(
checkpoint,
directory=logdir,
max_to_keep=10,
keep_checkpoint_every_n_hours=1)
status = checkpoint.restore(ckpt_manager.latest_checkpoint)
return ckpt_manager, status, checkpoint
def to_dict(config):
if isinstance(config, list):
return [to_dict(c) for c in config]
elif isinstance(config, EasyDict):
return dict([(k, to_dict(v)) for k, v in config.items()])
else:
return config
def setup_train_dir(logdir, overwrite=False, force_train=True):
"""Setups directory for training."""
tf.io.gfile.makedirs(logdir)
config_path = os.path.join(logdir, 'config.json')
if not os.path.exists(config_path) or overwrite:
    logging.info(
        'Using the passed-in config as no config.json file exists in '
        '%s', logdir)
with tf.io.gfile.GFile(config_path, 'w') as config_file:
config = dict([(k, to_dict(v)) for k, v in CONFIG.items()])
json.dump(config, config_file, sort_keys=True, indent=4)
else:
logging.info(
'Using config from config.json that exists in %s.', logdir)
with tf.io.gfile.GFile(config_path, 'r') as config_file:
config_dict = json.load(config_file)
CONFIG.update(config_dict)
train_logs_dir = os.path.join(logdir, 'train.logs')
if os.path.exists(train_logs_dir) and not force_train:
raise ValueError('You might be overwriting a directory that already '
'has train_logs. Please provide a new logdir name in '
'config or pass --force_train while launching script.')
tf.io.gfile.makedirs(train_logs_dir)
def setup_eval_dir(logdir, config_timeout_seconds=1):
"""Setups directory for evaluation."""
tf.io.gfile.makedirs(logdir)
tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs'))
config_path = os.path.join(logdir, 'config.json')
while not tf.io.gfile.exists(config_path):
    logging.info('Waiting for config to exist. Going to sleep '
                 'for %s secs.', config_timeout_seconds)
time.sleep(config_timeout_seconds)
while True:
with tf.io.gfile.GFile(config_path, 'r') as config_file:
config_dict = json.load(config_file)
if config_dict is None:
time.sleep(config_timeout_seconds)
else:
break
CONFIG.update(config_dict)
def get_data(iterator):
"""Return a data dict which contains all the requested sequences."""
data = iterator.get_next()
return data, data['chosen_steps'], data['seq_lens']
@tf.function
def get_cnn_feats(cnn, data, training, num_steps=None):
"""Passes data through base CNN."""
if num_steps is None:
if training:
num_steps = CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS
else:
num_steps = CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS
cnn.num_steps = num_steps
cnn_feats = cnn(data['frames'])
return cnn_feats
def get_context_steps(step):
num_steps = CONFIG.DATA.NUM_STEPS
stride = CONFIG.DATA.FRAME_STRIDE
# We don't want to see the future.
steps = np.arange(step - (num_steps - 1) * stride, step + stride, stride)
return steps
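# Example (with hypothetical config values): if CONFIG.DATA.NUM_STEPS=2 and
# CONFIG.DATA.FRAME_STRIDE=5, get_context_steps(20) returns array([15, 20]),
# i.e. the current step plus one context frame in the past, never the future.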
def get_indices(curr_idx, num_steps, seq_len):
  steps = range(curr_idx, curr_idx + num_steps)
  single_steps = np.concatenate([get_context_steps(step) for step in steps])
single_steps = np.maximum(0, single_steps)
single_steps = np.minimum(seq_len, single_steps)
return single_steps
def get_embeddings_dataset(model, iterator, frames_per_batch,
keep_data=False, optical_flow=False, keep_labels=True,
max_embs=None, callbacks=[]):
"""Get embeddings from a one epoch iterator."""
keep_labels = keep_labels and CONFIG.DATA.FRAME_LABELS
num_frames_per_step = CONFIG.DATA.NUM_STEPS
cnn = model['cnn']
emb = model['emb']
embs_list = []
labels_list = []
steps_list = []
seq_lens_list = []
names_list = []
seq_labels_list = []
if keep_data:
frames_list = []
if optical_flow:
frame_original_list = []
n = 0
def cond(n):
if max_embs is None:
return True
else:
return n < max_embs
# Make Recurrent Layers stateful, set batch size.
# We do this as we are embedding the whole sequence and that can take
# more than one batch to be passed and we don't want to automatically
# reset hidden states after each batch.
if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru':
for gru_layer in emb.gru_layers:
gru_layer.stateful = True
gru_layer.input_spec[0].shape = [1, ]
while cond(n):
try:
print(n)
embs = []
labels = []
steps = []
seq_lens = []
names = []
seq_labels = []
if keep_data:
frames = []
if optical_flow:
frame_original = []
# Reset GRU states for each video.
if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru':
for gru_layer in emb.gru_layers:
gru_layer.reset_states()
data, chosen_steps, seq_len = get_data(iterator)
seq_len = seq_len.numpy()[0]
num_batches = int(math.ceil(float(seq_len)/frames_per_batch))
for i in range(num_batches):
if (i + 1) * frames_per_batch > seq_len:
num_steps = seq_len - i * frames_per_batch
else:
num_steps = frames_per_batch
curr_idx = i * frames_per_batch
curr_data = {}
for k, v in data.items():
# Need to do this as some modalities might not exist.
if len(v.shape) > 1 and v.shape[1] != 0:
idxes = get_indices(curr_idx, num_steps, seq_len)
curr_data[k] = tf.gather(v, idxes, axis=1)
else:
curr_data[k] = v
cnn_feats = get_cnn_feats(cnn, curr_data,
num_steps=num_frames_per_step * num_steps,
training=False)
emb_feats = emb(cnn_feats, num_steps)
logging.debug('On sequence number %d, frames embedded %d', n,
curr_idx + num_steps)
# np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy())
# np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data["frames"])
# np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy())
# np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy())
embs.append(emb_feats.numpy())
for f in callbacks:
f(np.concatenate(embs), data, chosen_steps, seq_len)
steps.append(chosen_steps.numpy()[0])
seq_lens.append(seq_len * [seq_len])
all_labels = data['frame_labels'].numpy()[0]
name = data['name'].numpy()[0]
names.append(seq_len * [name])
seq_label = data['seq_labels'].numpy()[0]
seq_labels.append(seq_len * [seq_label])
labels.append(all_labels)
embs = np.concatenate(embs, axis=0)
labels = np.concatenate(labels, axis=0)
steps = np.concatenate(steps, axis=0)
seq_lens = np.concatenate(seq_lens, axis=0)
names = np.concatenate(names, axis=0)
seq_labels = np.concatenate(seq_labels, axis=0)
if keep_data:
frames.append(data['frames'].numpy()[0])
frames = np.concatenate(frames, axis=0)
if optical_flow:
frame_original.append(data['video_frames'].numpy()[0])
frame_original = np.concatenate(frame_original, axis=0)
if keep_labels:
labels = labels[~np.isnan(embs).any(axis=1)]
assert len(embs) == len(labels)
seq_labels = seq_labels[~np.isnan(embs).any(axis=1)]
names = names[~np.isnan(embs).any(axis=1)]
seq_lens = seq_lens[~np.isnan(embs).any(axis=1)]
steps = steps[~np.isnan(embs).any(axis=1)]
if keep_data:
frames = frames[~np.isnan(embs).any(axis=1)]
if optical_flow:
frame_original = frame_original[~np.isnan(embs).any(axis=1)]
embs = embs[~np.isnan(embs).any(axis=1)]
assert len(embs) == len(seq_lens)
assert len(embs) == len(steps)
assert len(names) == len(steps)
embs_list.append(embs)
if keep_labels:
labels_list.append(labels)
seq_labels_list.append(seq_labels)
steps_list.append(steps)
seq_lens_list.append(seq_lens)
names_list.append(names)
if keep_data:
frames_list.append(frames)
if optical_flow:
frame_original_list.append(frame_original)
n += 1
except tf.errors.OutOfRangeError:
logging.info('Finished embedding the dataset.')
break
dataset = {'embs': embs_list,
'seq_lens': seq_lens_list,
'steps': steps_list,
'names': names_list,
'seq_labels': seq_labels_list}
if keep_data:
dataset['frames'] = frames_list
if optical_flow:
dataset['frames_original'] = frame_original_list
if keep_labels:
dataset['labels'] = labels_list
# Reset statefulness to recurrent layers for other evaluation tasks.
if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru':
for gru_layer in emb.gru_layers:
gru_layer.stateful = False
return dataset
def gen_plot(x, y):
"""Create a pyplot, save to buffer and return TB compatible image."""
plt.figure()
plt.plot(x, y)
plt.title('Val Accuracy')
plt.ylim(0, 1)
plt.tight_layout()
buf = io.BytesIO()
plt.savefig(buf, format='png')
buf.seek(0)
# Convert PNG buffer to TF image
image = tf.image.decode_png(buf.getvalue(), channels=4)
# Add the batch dimension
image = tf.expand_dims(image, 0)
return image
class Stopwatch(object):
"""Simple timer for measuring elapsed time."""
def __init__(self):
self.reset()
def elapsed(self):
return time.time() - self.time
def done(self, target_interval):
return self.elapsed() >= target_interval
def reset(self):
self.time = time.time()
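# Minimal usage sketch for Stopwatch (illustrative, not in the original file):
#   timer = Stopwatch()
#   ... do work ...
#   if timer.done(60.0):  # at least 60 seconds elapsed?
#       timer.reset()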
def set_learning_phase(f):
"""Sets the correct learning phase before calling function f."""
def wrapper(*args, **kwargs):
"""Calls the function f after setting proper learning phase."""
if 'training' not in kwargs:
raise ValueError('Function called with set_learning_phase decorator which'
' does not have training argument.')
training = kwargs['training']
if training:
# Set learning_phase to True to use models in training mode.
tf.keras.backend.set_learning_phase(1)
else:
# Set learning_phase to False to use models in inference mode.
tf.keras.backend.set_learning_phase(0)
return f(*args, **kwargs)
return wrapper
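# Illustrative usage of the decorator above (function and argument names are
# hypothetical): the wrapped function must be called with an explicit
# `training` keyword, which selects the Keras learning phase.
#   @set_learning_phase
#   def forward(model, data, training=False):
#       return model(data['frames'])
#   feats = forward(model, data, training=True)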
def load_config(config_path):
config = None
if os.path.exists(config_path):
with open(config_path) as f:
config = json.load(f)
assert config is not None, "config file is not provided or is corrupted"
return config
def prepare_gpu(ind=-1):
ind = int(ind)
GPUS = tf.config.experimental.list_physical_devices('GPU')
if GPUS:
if ind > -1:
tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU')
try:
# Currently, memory growth needs to be the same across GPUs
for gpu in GPUS:
tf.config.experimental.set_memory_growth(gpu, True)
logical_gpus = tf.config.experimental.list_logical_devices('GPU')
logging.info([len(GPUS), "Physical GPUs,", len(logical_gpus),
"Logical GPUs"])
except RuntimeError as e:
# Memory growth must be set before GPUs have been initialized
logging.info(e)
os.environ["CUDA_VISIBLE_DEVICES"] = str(ind)
|
StarcoderdataPython
|
11212793
|
<gh_stars>0
from .menus import *
from .misc import Loading
from .paginators import BasePaginator
|
StarcoderdataPython
|
1670606
|
import pygame
from .chars import Enemy
from .shells import *
import os
# Working file paths
BASE_PATH = os.path.dirname(__file__)
IMAGES_PATH = os.path.join(BASE_PATH, 'resources/Images/')
# virus_1
class Virus1(Enemy):
ammo = Virus1shell
height = 78
width = 78
hp = 100
health_max = 100
# Virus_2
class Virus2(Enemy):
ammo = Virus2shell
height = 78
width = 78
hp = 200
health_max = 200
# Virus3
class Virus3(Enemy):
ammo = Virus3shell
height = 156
width = 156
hp = 300
health_max = 300
# Virus4
class Virus4(Enemy):
ammo = Virus4shell
height = 156
width = 156
hp = 350
health_max = 350
# virus_boss
class VirusBoss(Enemy):
ammo = Virus1shell
height = 320
width = 320
hp = 500
health_max = 500
spawn_cooldown = 70
enemy_list = []
enemy = Virus2
# Loading the boss health bar
health_bar = pygame.image.load(IMAGES_PATH + "HUD/bossbar.png")
# Function to spawn enemies
def spawn_enemies(self, player):
# checking if there is room to spawn enemies
if self.spawn_cooldown <= 0 and len(self.enemy_list) < 11:
self.spawn_cooldown = 70
# checking for the distance from player
            if abs(player.x - self.x) < 700:
self.enemy_list.append(self.enemy(self.x - 50, 500))
self.enemy_list[-1].load_anim(IMAGES_PATH + "/Characters/Virus/Virus_2/idle.png",
IMAGES_PATH + "Projectiles/virus_1_")
self.enemy_list[-1].Tracking = True
else:
self.spawn_cooldown -= 1
# Checking for damage by player
def check_hurt(self, player):
if player.current_weapon != 0:
if not self.enemy_list:
ammo_list = player.weapons[player.weapon_list[player.current_weapon]].ammo_list
for ammo in ammo_list:
if self.x + self.width > ammo.x > self.x and self.y + self.height > ammo.y > self.y:
self.hp -= ammo.damage
ammo_list.pop(ammo_list.index(ammo))
# Update function for boss health-bar
def update_health_bar(self, win):
if self.hp > 0:
win.blit(self.health_bar, (430, 22), (0, 0, (self.hp/self.health_max) * 500, 20))
# function to kill player if in the virus
def kill_on_contact(self, player):
if self.x + self.width > player.x > self.x and self.y + self.height > player.y > self.y:
player.hp = 0
|
StarcoderdataPython
|
16094
|
if __name__ == '__main__':
from .system import MyApp
MyApp().run()
|
StarcoderdataPython
|
6569743
|
def test_if_template_json_loads_successfully():
from metadata.v140.template import OEMETADATA_V140_TEMPLATE
def test_template_against_schema_which_should_succeed():
import jsonschema
from metadata.v140.template import OEMETADATA_V140_TEMPLATE
from metadata.v140.schema import OEMETADATA_V140_SCHEMA
    assert jsonschema.validate(OEMETADATA_V140_TEMPLATE, OEMETADATA_V140_SCHEMA) is None
|
StarcoderdataPython
|
1661560
|
"""
Simple custom commands to send preconfigured text messages to channels.
Config:
commands ((str, str) dict):
Mapping from command name to rich response text.
"""
import immp
from immp.hook.command import command, DynamicCommands
class TextCommandHook(immp.Hook, DynamicCommands):
"""
Command provider to send configured text responses.
"""
schema = immp.Schema({"commands": {str: str}})
def commands(self):
return {self._response.complete(name, name) for name in self.config["commands"]}
@command()
async def _response(self, name, msg):
text = immp.RichText.unraw(self.config["commands"][name], self.host)
await msg.channel.send(immp.Message(text=text))
|
StarcoderdataPython
|
5061921
|
<filename>flask-api/service/movie.py
from app import db
from exception.movie_exists import MovieExists
from exception.resource_not_found import ResourceNotFound
from model.genre import Genre
from model.movie import Movie
from model.rating import Rating
from model.role import Role
from model.user import User
from schema.genre import GenreSchema
from schema.movie import MovieSchema
from schema.rating_movie import RatingMovieSchema
from schema.rating_movie_id import RatingMovieIdSchema
class MovieService:
@staticmethod
def create(movie: Movie) -> dict:
        db_movie = Movie.query.filter(Movie.name == movie['name']).first()
        if db_movie:
raise MovieExists("Movie already exists")
new_movie = Movie(
name=movie['name'],
description=movie['description'],
year=movie['year'],
duration=movie['duration'])
for genre in movie['genres']:
new_movie.genres.append(Genre.query.get(genre['id']))
db.session.add(new_movie)
db.session.commit()
return MovieSchema().dump(new_movie)
@staticmethod
def get_all() -> dict:
return MovieSchema(many=True).dump(
Movie.query.all())
@staticmethod
def get_all_by_genre(genre_name) -> dict:
return MovieSchema(many=True).dump(
Movie.query.join(Movie.genres).filter(Genre.name == genre_name).all())
@staticmethod
def get_by_id(movie_id: int, username: str) -> dict:
movie = Movie.query.get(movie_id)
if not movie:
raise ResourceNotFound("Movie not found")
has_rated = MovieService.has_rated(username, movie)
return {
"movie": MovieSchema().dump(movie),
"hasRated": has_rated
}
@staticmethod
def search_by_name(name: str) -> dict:
search = "%{}%".format(name)
return MovieSchema(many=True).dump(
Movie.query.filter(Movie.name.ilike(search)).all())
@staticmethod
def has_rated(username: str, movie: Movie) -> bool:
user = User.query.filter(User.username == username).first()
if not user:
return False
has_rated = False
for rating in movie.ratings:
if rating.user_id == user.id:
has_rated = True
return has_rated
@staticmethod
def rate_movie(username: str, rating: RatingMovieIdSchema) -> dict:
user = User.query.filter(User.username == username).first()
if not user:
raise ResourceNotFound("User not found")
movie = Movie.query.get(rating['movie_id'])
if not movie:
raise ResourceNotFound("Movie not found")
new_rating = Rating(rating=rating['rating'], comment=rating['comment'])
new_rating.movie_id = movie.id
new_rating.user_id = user.id
db.session.add(new_rating)
db.session.commit()
return RatingMovieIdSchema().dump(new_rating)
|
StarcoderdataPython
|
6522674
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 <NAME> (Kronuz)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from __future__ import absolute_import
import re
import textwrap
import docopt
from .compat import get_meth_func
NBSP = '__'
def cmd_usage(doc):
return doc.replace(NBSP, ' ')
docopt_extras_ref = docopt.extras
def docopt_extras(help, version, options, doc):
return docopt_extras_ref(help, version, options, cmd_usage(doc))
def DocoptExit____init__(self, message=''):
SystemExit.__init__(self, (message + '\n' + cmd_usage(self.usage)).strip())
docopt.extras = docopt_extras
docopt.DocoptExit.__init__ = DocoptExit____init__
def spaced(name):
name = re.sub(r'[ _]+', r' ', name)
name = re.sub(r'(?<=[^_])([A-Z])', r' \1', name).lower()
return re.sub(r'^( *)(.*?)( *)$', r'\2', name)
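# Example (illustrative): spaced('MyCommandName') -> 'my command name' and
# spaced('my_command') -> 'my command'; camel case and underscores are both
# turned into single spaces before lower-casing.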
class Command(object):
"""
Usage: command <subcommand> [<args>...]
"""
subcommand_name = '<subcommand>'
argv_name = '<args>'
@staticmethod
def docopt(doc, **kwargs):
name = kwargs.pop('name', "")
name = spaced(name)
doc = textwrap.dedent(doc).replace(name, name.replace(' ', NBSP))
arguments = docopt.docopt(doc, options_first=True, **kwargs)
return arguments
def __init__(self, arguments):
self.arguments = arguments
def __call__(self):
if self.subcommand_name in self.arguments:
cmd = self.arguments[self.subcommand_name]
cmd_attr = cmd.replace('-', '_')
if hasattr(self, cmd_attr):
klass = getattr(self, cmd_attr)
meth_func = get_meth_func(klass)
if meth_func:
cmd = meth_func.__name__.replace('_', '-')
name = '{} {}'.format(self.__class__.__name__, cmd)
if klass.__doc__:
arguments = self.docopt(klass.__doc__, argv=self.arguments.get(self.argv_name, []), name=name)
else:
arguments = []
obj = klass(arguments)
else:
obj = self.run()
else:
obj = self.run()
if callable(obj):
obj = obj()
return obj
def run(self):
raise docopt.DocoptExit()
|
StarcoderdataPython
|
8058828
|
# count = int(input())
# for _ in range(count):
# a, b = map(int, input().split())
# print (a+b)
''' [Question 8393] John got a bad mark in math. The teacher gave him another task. John is to write a program which computes the sum of integers from 1 to n. If he manages to present a correct program, the bad mark will be cancelled.
Write a program which:
reads the number n from the standard input,
computes the sum of integers from 1 to n,
writes the answer to the standard output.
The first and only line of the standard input contains one integer n (1 ≤ n ≤ 10 000).
One integer is to be written to the standard output. This integer should be the sum of integers from 1 to n.
'''
# n = int(input())
# result = 0
# for i in range(1, n+1):
# result += i
# print(result)
''' [Question 15552] '''
# import sys
# n = int(sys.stdin.readline())
# for _ in range(n):
# a, b = map(int, sys.stdin.readline().split())
# print (a+b)
''' [Question 2741]'''
# import sys
# n = int(sys.stdin.readline())
# for i in range(1, n+1):
# print(i)
''' [Question 2742]'''
# import sys
# n = int(sys.stdin.readline())
# for i in range(n, 0,-1):
# print(i)
'''[Question 11021] '''
# import sys
# n = int(sys.stdin.readline())
# for i in range(1, n+1):
# a, b = map(int, sys.stdin.readline().split())
# print("Case #{}:".format(i),a+b)
'''[Question 11022] '''
# import sys
# n = int(sys.stdin.readline())
# for i in range(1, n+1):
# a, b = map(int, sys.stdin.readline().split())
# print("Case #{}:".format(i), a, "+", b, "=", a+b)
'''[Question 2438] '''
# import sys
# n = int(sys.stdin.readline())
# for i in range(1, n+1):
# print("*"*i)
'''[Question 2439] '''
# import sys
# n = int(sys.stdin.readline())
# for i in range(1, n+1):
# c = n - i
# print(" "*c + "*"*i)
'''[Question 10871] '''
# import sys
# n, x = map(int, sys.stdin.readline().split())
# element = list(map(int, sys.stdin.readline().split()))
# for i in range(n):
# if element[i] < x:
# print(element[i], end=" ")
'''[Question 10952] '''
# import sys
# a = 1
# b = 1
# while a != 0 and b != 0:
# a, b = map(int, sys.stdin.readline().split())
# if a==0 and b ==0:
# break
# print(a+b)
|
StarcoderdataPython
|
6635653
|
"""
Pure python implementation of rtree
Modification of
http://code.google.com/p/pyrtree/
"""
__all__ = ['RTree', 'Rect', 'Rtree', 'RTreeError']
MAXCHILDREN = 10
MAX_KMEANS = 5
BUFFER = 0.0000001
import math
import random
import time
import array
class RTreeError(Exception): pass
class Rect(object):
"""
A rectangle class that stores: an axis aligned rectangle, and: two
flags (swapped_x and swapped_y). (The flags are stored
implicitly via swaps in the order of minx/y and maxx/y.)
"""
__slots__ = ("x", "y", "xx", "yy", "swapped_x", "swapped_y")
def __getstate__(self):
return (self.x, self.y, self.xx, self.yy, self.swapped_x, self.swapped_y)
def __setstate__(self, state):
self.x, self.y, self.xx, self.yy, self.swapped_x, self.swapped_y = state
def __init__(self, minx, miny, maxx, maxy):
self.swapped_x = (maxx < minx)
self.swapped_y = (maxy < miny)
self.x = minx
self.y = miny
self.xx = maxx
self.yy = maxy
if self.swapped_x:
self.x, self.xx = maxx, minx
if self.swapped_y:
self.y, self.yy = maxy, miny
def coords(self):
return self.x, self.y, self.xx, self.yy
def overlap(self, orect):
return self.intersect(orect).area()
def write_raw_coords(self, toarray, idx):
toarray[idx] = self.x
toarray[idx + 1] = self.y
toarray[idx + 2] = self.xx
toarray[idx + 3] = self.yy
if (self.swapped_x):
toarray[idx] = self.xx
toarray[idx + 2] = self.x
if (self.swapped_y):
toarray[idx + 1] = self.yy
toarray[idx + 3] = self.y
def area(self):
w = self.xx - self.x
h = self.yy - self.y
return w * h
def extent(self):
x = self.x
y = self.y
return (x, y, self.xx - x, self.yy - y)
def grow(self, amt):
a = amt * 0.5
return Rect(self.x - a, self.y - a, self.xx + a, self.yy + a)
def intersect(self, o):
if self is NullRect:
return NullRect
if o is NullRect:
return NullRect
nx, ny = max(self.x, o.x), max(self.y, o.y)
nx2, ny2 = min(self.xx, o.xx), min(self.yy, o.yy)
w, h = nx2 - nx, ny2 - ny
if w <= 0 or h <= 0:
return NullRect
return Rect(nx, ny, nx2, ny2)
def does_contain(self, o):
return self.does_containpoint((o.x, o.y)) and self.does_containpoint((o.xx, o.yy))
def does_intersect(self, o):
return (self.intersect(o).area() > 0)
def does_containpoint(self, p):
x, y = p
return (x >= self.x and x <= self.xx and y >= self.y and y <= self.yy)
def union(self, o):
if o is NullRect:
return Rect(self.x, self.y, self.xx, self.yy)
if self is NullRect:
return Rect(o.x, o.y, o.xx, o.yy)
x = self.x
y = self.y
xx = self.xx
yy = self.yy
ox = o.x
oy = o.y
oxx = o.xx
oyy = o.yy
nx = x if x < ox else ox
ny = y if y < oy else oy
nx2 = xx if xx > oxx else oxx
ny2 = yy if yy > oyy else oyy
res = Rect(nx, ny, nx2, ny2)
return res
def union_point(self, o):
x, y = o
return self.union(Rect(x, y, x, y))
def diagonal_sq(self):
if self is NullRect:
return 0
w = self.xx - self.x
h = self.yy - self.y
return w * w + h * h
def diagonal(self):
return math.sqrt(self.diagonal_sq())
NullRect = Rect(0.0, 0.0, 0.0, 0.0)
NullRect.swapped_x = False
NullRect.swapped_y = False
def union_all(kids):
cur = NullRect
for k in kids:
cur = cur.union(k.rect)
assert(False == cur.swapped_x)
return cur
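# Illustrative example of Rect behaviour (not part of the original library):
#   a = Rect(0.0, 0.0, 2.0, 2.0)
#   b = Rect(1.0, 1.0, 3.0, 3.0)
#   a.intersect(b).area()            # -> 1.0, the unit overlap square
#   a.union(b).coords()              # -> (0.0, 0.0, 3.0, 3.0)
#   a.does_containpoint((1.5, 1.5))  # -> True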
def Rtree():
return RTree()
class RTree(object):
def __init__(self, stream=None):
self.count = 0
self.stats = {
"overflow_f": 0,
"avg_overflow_t_f": 0.0,
"longest_overflow": 0.0,
"longest_kmeans": 0.0,
"sum_kmeans_iter_f": 0,
"count_kmeans_iter_f": 0,
"avg_kmeans_iter_f": 0.0
}
# This round: not using objects directly -- they
# take up too much memory, and efficiency goes down the toilet
# (obviously) if things start to page.
# Less obviously: using object graph directly leads to really long GC
# pause times, too.
# Instead, it uses pools of arrays:
self.count = 0
self.leaf_count = 0
self.rect_pool = array.array('d')
self.node_pool = array.array('L')
self.leaf_pool = [] # leaf objects.
self.cursor = _NodeCursor.create(self, NullRect)
if stream:
for i, bound, idx in stream:
self.add(id=idx, boundingbox=bound)
def _ensure_pool(self, idx):
if len(self.rect_pool) < (4 * idx):
self.rect_pool.extend([0, 0, 0, 0] * idx)
self.node_pool.extend([0, 0] * idx)
def insert(self, o, orect):
self.cursor.insert(o, orect)
assert(self.cursor.index == 0)
def query_rect(self, r):
for x in self.cursor.query_rect(r):
yield x
def query_point(self, p):
for x in self.cursor.query_point(p):
yield x
def walk(self, pred):
return self.cursor.walk(pred)
def intersection(self, boundingbox):
"""
replicate c rtree method
Returns
-------
ids : list
list of object ids whose bounding boxes intersect with query
bounding box
"""
# grow the bounding box slightly to handle coincident edges
bb = boundingbox[:]
bb[0] = bb[0] - BUFFER
bb[1] = bb[1] - BUFFER
bb[2] = bb[2] + BUFFER
bb[3] = bb[3] + BUFFER
qr = Rect(bb[0], bb[1], bb[2], bb[3])
return [r.leaf_obj() for r in self.query_rect(qr) if r.is_leaf()]
def add(self, id, boundingbox):
"""
replicate c rtree method
Arguments
---------
id: object id
boundingbox: list
bounding box [minx, miny, maxx, maxy]
"""
bb = boundingbox
self.cursor.insert(id, Rect(bb[0], bb[1], bb[2], bb[3]))
class _NodeCursor(object):
@classmethod
def create(cls, rooto, rect):
idx = rooto.count
rooto.count += 1
rooto._ensure_pool(idx + 1)
#rooto.node_pool.extend([0,0])
#rooto.rect_pool.extend([0,0,0,0])
retv = _NodeCursor(rooto, idx, rect, 0, 0)
retv._save_back()
return retv
@classmethod
def create_with_children(cls, children, rooto):
rect = union_all([c for c in children])
nr = Rect(rect.x, rect.y, rect.xx, rect.yy)
assert(not rect.swapped_x)
nc = _NodeCursor.create(rooto, rect)
nc._set_children(children)
assert(not nc.is_leaf())
return nc
@classmethod
def create_leaf(cls, rooto, leaf_obj, leaf_rect):
rect = Rect(leaf_rect.x, leaf_rect.y, leaf_rect.xx, leaf_rect.yy)
rect.swapped_x = True # Mark as leaf by setting the xswap flag.
res = _NodeCursor.create(rooto, rect)
idx = res.index
res.first_child = rooto.leaf_count
rooto.leaf_count += 1
res.next_sibling = 0
rooto.leaf_pool.append(leaf_obj)
res._save_back()
res._become(idx)
assert(res.is_leaf())
return res
__slots__ = ("root", "npool", "rpool", "index", "rect",
"next_sibling", "first_child")
def __getstate__(self):
return (self.root, self.npool, self.rpool, self.index, self.rect, self.next_sibling, self.first_child)
def __setstate__(self, state):
self.root, self.npool, self.rpool, self.index, self.rect, self.next_sibling, self.first_child = state
def __init__(self, rooto, index, rect, first_child, next_sibling):
self.root = rooto
self.rpool = rooto.rect_pool
self.npool = rooto.node_pool
self.index = index
self.rect = rect
self.next_sibling = next_sibling
self.first_child = first_child
def walk(self, predicate):
if (predicate(self, self.leaf_obj())):
yield self
if not self.is_leaf():
for c in self.children():
for cr in c.walk(predicate):
yield cr
def query_rect(self, r):
""" Return things that intersect with 'r'. """
def p(o, x):
return r.does_intersect(o.rect)
for rr in self.walk(p):
yield rr
def query_point(self, point):
""" Query by a point """
def p(o, x):
return o.rect.does_containpoint(point)
for rr in self.walk(p):
yield rr
def lift(self):
return _NodeCursor(self.root,
self.index,
self.rect,
self.first_child,
self.next_sibling)
def _become(self, index):
recti = index * 4
nodei = index * 2
rp = self.rpool
x = rp[recti]
y = rp[recti + 1]
xx = rp[recti + 2]
yy = rp[recti + 3]
if (x == 0.0 and y == 0.0 and xx == 0.0 and yy == 0.0):
self.rect = NullRect
else:
self.rect = Rect(x, y, xx, yy)
self.next_sibling = self.npool[nodei]
self.first_child = self.npool[nodei + 1]
self.index = index
def is_leaf(self):
return self.rect.swapped_x
def has_children(self):
return not self.is_leaf() and 0 != self.first_child
def holds_leaves(self):
if 0 == self.first_child:
return True
else:
return self.has_children() and self.get_first_child().is_leaf()
def get_first_child(self):
fc = self.first_child
c = _NodeCursor(self.root, 0, NullRect, 0, 0)
c._become(self.first_child)
return c
def leaf_obj(self):
if self.is_leaf():
return self.root.leaf_pool[self.first_child]
else:
return None
def _save_back(self):
rp = self.rpool
recti = self.index * 4
nodei = self.index * 2
if self.rect is not NullRect:
self.rect.write_raw_coords(rp, recti)
else:
rp[recti] = 0
rp[recti + 1] = 0
rp[recti + 2] = 0
rp[recti + 3] = 0
self.npool[nodei] = self.next_sibling
self.npool[nodei + 1] = self.first_child
def nchildren(self):
i = self.index
c = 0
for x in self.children():
c += 1
return c
def insert(self, leafo, leafrect):
index = self.index
# tail recursion, made into loop:
while True:
if self.holds_leaves():
self.rect = self.rect.union(leafrect)
self._insert_child(_NodeCursor.create_leaf(
self.root, leafo, leafrect))
self._balance()
# done: become the original again
self._become(index)
return
else:
# Not holding leaves, move down a level in the tree:
# Micro-optimization:
# inlining union() calls -- logic is:
# ignored,child = min([ ((c.rect.union(leafrect)).area() - c.rect.area(),c.index) for c in self.children() ])
child = None
minarea = -1.0
for c in self.children():
x, y, xx, yy = c.rect.coords()
lx, ly, lxx, lyy = leafrect.coords()
nx = x if x < lx else lx
nxx = xx if xx > lxx else lxx
ny = y if y < ly else ly
nyy = yy if yy > lyy else lyy
a = (nxx - nx) * (nyy - ny)
if minarea < 0 or a < minarea:
minarea = a
child = c.index
# End micro-optimization
self.rect = self.rect.union(leafrect)
self._save_back()
self._become(child) # recurse.
def _balance(self):
if (self.nchildren() <= MAXCHILDREN):
return
        t = time.perf_counter()
cur_score = -10
s_children = [c.lift() for c in self.children()]
memo = {}
clusterings = [k_means_cluster(
self.root, k, s_children) for k in range(2, MAX_KMEANS)]
score, bestcluster = max(
[(silhouette_coeff(c, memo), c) for c in clusterings])
nodes = [_NodeCursor.create_with_children(
c, self.root) for c in bestcluster if len(c) > 0]
self._set_children(nodes)
        dur = (time.perf_counter() - t)
c = float(self.root.stats["overflow_f"])
oa = self.root.stats["avg_overflow_t_f"]
self.root.stats["avg_overflow_t_f"] = (
dur / (c + 1.0)) + (c * oa / (c + 1.0))
self.root.stats["overflow_f"] += 1
self.root.stats["longest_overflow"] = max(
self.root.stats["longest_overflow"], dur)
def _set_children(self, cs):
self.first_child = 0
if 0 == len(cs):
return
pred = None
for c in cs:
if pred is not None:
pred.next_sibling = c.index
pred._save_back()
if 0 == self.first_child:
self.first_child = c.index
pred = c
pred.next_sibling = 0
pred._save_back()
self._save_back()
def _insert_child(self, c):
c.next_sibling = self.first_child
self.first_child = c.index
c._save_back()
self._save_back()
def children(self):
if (0 == self.first_child):
return
idx = self.index
fc = self.first_child
ns = self.next_sibling
r = self.rect
self._become(self.first_child)
while True:
yield self
if 0 == self.next_sibling:
break
else:
self._become(self.next_sibling)
# Go back to becoming the same node we were.
#self._become(idx)
self.index = idx
self.first_child = fc
self.next_sibling = ns
self.rect = r
def avg_diagonals(node, onodes, memo_tab):
nidx = node.index
sv = 0.0
diag = 0.0
for onode in onodes:
k1 = (nidx, onode.index)
k2 = (onode.index, nidx)
if k1 in memo_tab:
diag = memo_tab[k1]
elif k2 in memo_tab:
diag = memo_tab[k2]
else:
diag = node.rect.union(onode.rect).diagonal()
memo_tab[k1] = diag
sv += diag
return sv / len(onodes)
def silhouette_w(node, cluster, next_closest_cluster, memo):
ndist = avg_diagonals(node, cluster, memo)
sdist = avg_diagonals(node, next_closest_cluster, memo)
return (sdist - ndist) / max(sdist, ndist)
def silhouette_coeff(clustering, memo_tab):
# special case for a clustering of 1.0
if (len(clustering) == 1):
return 1.0
coeffs = []
for cluster in clustering:
others = [c for c in clustering if c is not cluster]
others_cntr = [center_of_gravity(c) for c in others]
ws = [silhouette_w(node, cluster, others[closest(
others_cntr, node)], memo_tab) for node in cluster]
cluster_coeff = sum(ws) / len(ws)
coeffs.append(cluster_coeff)
return sum(coeffs) / len(coeffs)
def center_of_gravity(nodes):
totarea = 0.0
xs, ys = 0, 0
for n in nodes:
if n.rect is not NullRect:
x, y, w, h = n.rect.extent()
a = w * h
xs = xs + (a * (x + (0.5 * w)))
ys = ys + (a * (y + (0.5 * h)))
totarea = totarea + a
return (xs / totarea), (ys / totarea)
def closest(centroids, node):
x, y = center_of_gravity([node])
dist = -1
ridx = -1
for (i, (xx, yy)) in enumerate(centroids):
dsq = ((xx - x) ** 2) + ((yy - y) ** 2)
if -1 == dist or dsq < dist:
dist = dsq
ridx = i
return ridx
def k_means_cluster(root, k, nodes):
    t = time.perf_counter()
if len(nodes) <= k:
return [[n] for n in nodes]
ns = list(nodes)
root.stats["count_kmeans_iter_f"] += 1
# Initialize: take n random nodes.
#random.shuffle(ns)
cluster_starts = ns[:k]
cluster_centers = [center_of_gravity([n]) for n in ns[:k]]
# Loop until stable:
while True:
root.stats["sum_kmeans_iter_f"] += 1
clusters = [[] for c in cluster_centers]
for n in ns:
idx = closest(cluster_centers, n)
clusters[idx].append(n)
clusters = [c for c in clusters if len(c) > 0]
for c in clusters:
if (len(c) == 0):
print("Errorrr....")
print("Nodes: %d, centers: %s" % (len(ns),
repr(cluster_centers)))
assert(len(c) > 0)
rest = ns
first = False
new_cluster_centers = [center_of_gravity(c) for c in clusters]
if new_cluster_centers == cluster_centers:
root.stats["avg_kmeans_iter_f"] = float(root.stats["sum_kmeans_iter_f"] / root.stats["count_kmeans_iter_f"])
root.stats["longest_kmeans"] = max(
root.stats["longest_kmeans"], (time.clock() - t))
return clusters
else:
cluster_centers = new_cluster_centers
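# Minimal usage sketch for the pure-python RTree above (illustrative only):
#   tree = RTree()
#   tree.add(id=1, boundingbox=[0.0, 0.0, 1.0, 1.0])
#   tree.add(id=2, boundingbox=[2.0, 2.0, 3.0, 3.0])
#   tree.intersection([0.5, 0.5, 1.5, 1.5])  # -> [1]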
|
StarcoderdataPython
|
8103300
|
import keras
import cv2
import numpy as np
import argparse
from glob import glob
# GPU config
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
from keras import backend as K
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.gpu_options.visible_device_list="0"
sess = tf.Session(config=config)
K.set_session(sess)
# network
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D, Input, BatchNormalization
num_classes = 2
img_height, img_width = 64, 64
def Mynet():
inputs = Input((img_height, img_width, 3))
x = Conv2D(32, (3, 3), padding='same', activation='relu', name='conv1_1')(inputs)
x = BatchNormalization()(x)
x = Conv2D(32, (3, 3), padding='same', activation='relu', name='conv1_2')(x)
x = BatchNormalization()(x)
x = MaxPooling2D((2,2), padding='same')(x)
x = Conv2D(64, (3, 3), padding='same', activation='relu', name='conv2_1')(x)
x = BatchNormalization()(x)
x = Conv2D(64, (3, 3), padding='same', activation='relu', name='conv2_2')(x)
x = BatchNormalization()(x)
x = MaxPooling2D((2,2), padding='same')(x)
x = Conv2D(128, (3, 3), padding='same', activation='relu', name='conv3_1')(x)
x = BatchNormalization()(x)
x = Conv2D(128, (3, 3), padding='same', activation='relu', name='conv3_2')(x)
x = BatchNormalization()(x)
x = MaxPooling2D((2,2), padding='same')(x)
x = Conv2D(256, (3, 3), padding='same', activation='relu', name='conv4_1')(x)
x = BatchNormalization()(x)
x = Conv2D(256, (3, 3), padding='same', activation='relu', name='conv4_2')(x)
x = BatchNormalization()(x)
x = MaxPooling2D((2,2), padding='same')(x)
x = Flatten()(x)
x = Dense(1024, name='dense1', activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(1024, name='dense2', activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(num_classes, activation='softmax')(x)
model = Model(inputs=inputs, outputs=x, name='model')
return model
def data_load(dir_path):
xs = np.ndarray((0, img_height, img_width, 3))
ts = np.ndarray((0, num_classes))
paths = []
for dir_path in glob(dir_path + '/*'):
for path in glob(dir_path + '/*'):
x = cv2.imread(path)
x = cv2.resize(x, (img_width, img_height)).astype(np.float32)
x /= 255.
xs = np.r_[xs, x[None, ...]]
t = np.zeros((num_classes))
if 'akahara' in path:
t[0] = 1
elif 'madara' in path:
t[1] = 1
ts = np.r_[ts, t[None, ...]]
paths.append(path)
return xs, ts, paths
# train
def train():
model = Mynet()
for layer in model.layers:
layer.trainable = True
model.compile(
loss='categorical_crossentropy',
optimizer=keras.optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True),
metrics=['accuracy'])
xs, ts, paths = data_load('../Dataset/train/images')
# training
mb = 8
mbi = 0
train_ind = np.arange(len(xs))
np.random.seed(0)
np.random.shuffle(train_ind)
for i in range(100):
if mbi + mb > len(xs):
mb_ind = train_ind[mbi:]
np.random.shuffle(train_ind)
mb_ind = np.hstack((mb_ind, train_ind[:(mb-(len(xs)-mbi))]))
mbi = mb - (len(xs) - mbi)
else:
mb_ind = train_ind[mbi: mbi+mb]
mbi += mb
x = xs[mb_ind]
t = ts[mb_ind]
loss, acc = model.train_on_batch(x=x, y=t)
print("iter >>", i+1, ",loss >>", loss, ',accuracy >>', acc)
model.save('model.h5')
# test
def test():
# load trained model
model = Mynet()
model.load_weights('model.h5')
xs, ts, paths = data_load("../Dataset/test/images/")
for i in range(len(paths)):
x = xs[i]
t = ts[i]
path = paths[i]
x = np.expand_dims(x, axis=0)
pred = model.predict_on_batch(x)[0]
print("in {}, predicted probabilities >> {}".format(path, pred))
def arg_parse():
parser = argparse.ArgumentParser(description='CNN implemented with Keras')
parser.add_argument('--train', dest='train', action='store_true')
parser.add_argument('--test', dest='test', action='store_true')
args = parser.parse_args()
return args
# main
if __name__ == '__main__':
args = arg_parse()
if args.train:
train()
if args.test:
test()
if not (args.train or args.test):
print("please select train or test flag")
print("train: python main.py --train")
print("test: python main.py --test")
print("both: python main.py --train --test")
|
StarcoderdataPython
|
12852228
|
<reponame>MichelangeloConserva/Colosseum<filename>colosseum/mdps/river_swim/episodic/mdp.py
import gin
from colosseum.loops import human_loop
from colosseum.mdps import EpisodicMDP
from colosseum.mdps.river_swim.river_swim import RiverSwimMDP
@gin.configurable
class RiverSwimEpisodic(EpisodicMDP, RiverSwimMDP):
@property
def _graph_layout(self):
return {node: tuple(node) for node in self.G}
if __name__ == "__main__":
mdp = RiverSwimEpisodic(
seed=42,
randomize_actions=False,
size=15,
lazy=0.01,
random_action_p=0.1,
make_reward_stochastic=True,
)
# random_loop(mdp, 50, verbose=True)
human_loop(mdp)
|
StarcoderdataPython
|
3506756
|
<reponame>NatanaelAntonioli/PiramidesCopaDoMundo<filename>Buscador.py
# --------------- PRELIMINARIES ------------------------
# Dependencies
import xlrd  # Requires pip
import xlwt  # Requires pip
import math
# Maps a letter to a number
def letra(letra):
return (ord(letra)) - 97
# Returns the value of a cell
def celula(coluna, linha):
return format(sh.cell_value(rowx=linha-1, colx=letra(coluna)))
# --------------- RESTRICTIONS ------------------------
usarrestricoes = 1 # Enables the restrictions
restricao_raio = 9 # Sets the search radius, i.e. how many "steps" the pyramid will have. Use 9 for a fair comparison with the World Cup.
valor_limiar = 3 # Notifies at the end if the total number of matches is greater than or equal to this
total_iteracoes = 10
# -------------- LISTS FOR THE FINAL STATISTICS --------------
totais_finais = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
# --------------- MAIN CODE ---------------------
## For each set
for contador_iteracoes in range (0, total_iteracoes):
anos = []
acertos = []
limiar = []
totais = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
nome = 'iteracao_ordenada' + str(contador_iteracoes) + '.xls'
plan = xlrd.open_workbook(nome)
sh = plan.sheet_by_index(0)
for j in range (2,24):
match = 0
meio = j
if (usarrestricoes == 1):
raio = restricao_raio + 1
else:
raio = 200
if (celula ('c', meio) != '' and celula ('c', meio) != '.'):
#print ("")
#print ("------------- TESTANDO PARA CENTRO EM " + celula('d', meio) + ' ( ' + celula ('c', meio) + ' ) ' + "---------- SOMA: " + str(2 * float(celula ('c', meio))))
#print ("")
for i in range (1, raio):
if (celula('d', meio) == ''):
break
if ((celula('d', meio + i)) != '') and ((celula('d', meio - i)) != ''):
if ((celula('d', meio + i)) == (celula('d', meio - i))):
if ((celula('d', meio + i) != '')):
match = match + 1
#print (celula('d', meio + i) + '(' + celula ('c', meio +i) + ')' + 'vs ' + celula('d', meio - i) + '(' + celula ('c', meio - i) + ')' + " --> ACERTO!")
else:
a = 1
#print (celula('d', meio + i) + '(' + celula ('c', meio +i) + ')' + 'vs ' + celula('d', meio - i) + '(' + celula ('c', meio - i) + ')')
#print ("Total de acertos:", match)
totais[match]= totais[match]+1
anos.append(celula('d', meio) + '(' + celula ('c', meio) + ')')
acertos.append(match)
if (match >= valor_limiar):
limiar.append(' --> LIMIAR!')
else:
limiar.append('')
print ("")
print ("------------------ TESTANDO: " + nome + "---------------")
print ("")
for x in range(len(anos)):
print (anos[x] + "com " + str(acertos[x]) + " acertos" + limiar[x])
for x in range(0, 9):
formatado = "{:.0f}".format(x/(restricao_raio)*100)
if (totais[x] != 0):
totais_finais[x] = totais_finais[x] + 1
print ("")
print ("-------------- TOTAIS FINAIS DE PIRÂMIDES ----------------- ")
print ("")
for y in range (0,9):
print (str(totais_finais[y]) + " de " + str(total_iteracoes) + " ordenações aleatórias têm ao menos uma pirâmide de " + str(y) + " acertos.")
|
StarcoderdataPython
|
1648499
|
from functools import reduce # Required in Python 3
from typing import Iterable, TypeVar
import operator
T = TypeVar('T')
def prod(iterable: Iterable[T]) -> T:
"""
Returns the product of the elements in the given iterable.
"""
return reduce(operator.mul, iterable, 1)
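# Illustrative check (not part of the original module): prod multiplies the
# elements together and returns 1 for an empty iterable because of the
# initial value passed to reduce.
if __name__ == "__main__":
    assert prod([2, 3, 4]) == 24
    assert prod([]) == 1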
|
StarcoderdataPython
|
345822
|
<gh_stars>1-10
def build_model_filters(model, query, field):
filters = []
if query:
# The field exists as an exposed column
if model.__mapper__.has_property(field):
filters.append(getattr(model, field).like("%{}%".format(query)))
return filters
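# Hedged usage sketch (model and field names are hypothetical, not from this
# repository): the returned list can be splatted into a SQLAlchemy filter call.
#   filters = build_model_filters(User, query='ali', field='username')
#   users = User.query.filter(*filters).all() if filters else User.query.all()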
|
StarcoderdataPython
|
280496
|
<gh_stars>10-100
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier contient la classe PotionVente, détaillée plus bas."""
from abstraits.obase import BaseObj
from corps.fonctions import lisser
class PotionVente(BaseObj):
"""Cette classe enveloppe un conteneur de potion et une potion.
Elle simule certains comportements d'un objet standard afin de permettre
la vente des deux en magasin via la syntaxe /s conteneur/potion (voir
l'éditeur de magasin du module salle).
"""
type_achat = "potion"
def __init__(self, proto_conteneur, proto_potion):
"""Constructeur de la classe"""
BaseObj.__init__(self)
self.conteneur = proto_conteneur
self.potion = proto_potion
self._construire()
def __getnewargs__(self):
return (None, None)
def __repr__(self):
return "<{} dans {}>".format(self.potion.cle, self.conteneur.cle)
def __str__(self):
return self.conteneur.cle + "/" + self.potion.cle
@property
def cle(self):
return self.conteneur.cle + "/" + self.potion.cle
@property
def m_valeur(self):
return self.conteneur.prix + self.potion.prix
@property
def nom_achat(self):
if self.potion == "eau":
nom = "eau"
else:
nom = self.potion.nom_singulier
ajout = lisser(
" " + self.conteneur.connecteur.format(s="") + " " + nom)
return self.conteneur.nom_singulier + ajout
def get_nom(self, nombre=1):
"""Retourne le nom complet en fonction du nombre.
Par exemple :
Si nombre == 1 : retourne le nom singulier
Sinon : retourne le nombre et le nom pluriel
"""
ajout = ""
if self.potion is not None:
s = "s" if nombre > 1 else ""
if self.potion == "eau":
nom = "eau"
else:
nom = self.potion.get_nom()
ajout = lisser(
" " + self.conteneur.connecteur.format(s=s) + " " + nom)
if nombre <= 0:
raise ValueError("la fonction get_nom a été appelée " \
"avec un nombre négatif ou nul.")
elif nombre == 1:
return self.conteneur.nom_singulier + ajout
else:
if self.conteneur.noms_sup:
noms_sup = list(self.conteneur.noms_sup)
noms_sup.reverse()
for nom in noms_sup:
if nombre >= nom[0]:
return nom[1] + ajout
return str(nombre) + " " + self.conteneur.nom_pluriel + ajout
def acheter(self, quantite, magasin, transaction):
"""Achète les objets dans la quantité spécifiée."""
salle = magasin.parent
for i in range(quantite):
conteneur = importeur.objet.creer_objet(self.conteneur)
conteneur.potion = importeur.objet.creer_objet(self.potion)
salle.objets_sol.ajouter(conteneur)
def regarder(self, personnage):
"""Le personnage regarde le service (avant achat)."""
msg = self.conteneur.regarder(personnage) + "\n"
msg += self.potion.description.regarder(personnage)
return msg
|
StarcoderdataPython
|
6500688
|
<filename>research/nlp/seq2seq/src/seq2seq_model/components.py
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Components of model."""
import mindspore.common.dtype as mstype
import mindspore.nn as nn
from mindspore.ops import operations as P
class SaturateCast(nn.Cell):
"""Cast wrapper."""
def __init__(self, dst_type=mstype.float32):
super(SaturateCast, self).__init__()
self.cast = P.Cast()
self.dst_type = dst_type
def construct(self, x):
return self.cast(x, self.dst_type)
class LayerNorm(nn.Cell):
"""
Do layer norm.
Args:
in_channels (int): In channels number of layer norm.
return_2d (bool): Whether return 2d tensor.
Returns:
Tensor, output.
"""
def __init__(self, in_channels=None, return_2d=False):
super(LayerNorm, self).__init__()
self.return_2d = return_2d
self.layer_norm = nn.LayerNorm((in_channels,))
self.cast = P.Cast()
self.get_dtype = P.DType()
self.reshape = P.Reshape()
self.get_shape = P.Shape()
def construct(self, input_tensor):
"""Do layer norm."""
shape = self.get_shape(input_tensor)
batch_size = shape[0]
max_len = shape[1]
embed_dim = shape[2]
output = self.reshape(input_tensor, (-1, embed_dim))
output = self.cast(output, mstype.float32)
output = self.layer_norm(output)
output = self.cast(output, self.get_dtype(input_tensor))
if not self.return_2d:
output = self.reshape(output, (batch_size, max_len, embed_dim))
return output
|
StarcoderdataPython
|
8045770
|
import os
import unittest
import pikepdf
import pdf_preflight.rules as rules
import pdf_preflight.profiles as profiles
pdf_folder = os.path.join(os.path.dirname(__file__), "pdfs")
class TestPdfPreflight(unittest.TestCase):
def test_profile__pdfa1a(self):
filename = os.path.join(pdf_folder, "pdfa-1a.pdf")
self.assertEqual(None, profiles.Pdfa1a.check_preflight(filename))
filename = os.path.join(pdf_folder, "standard_14_font.pdf")
with self.assertRaisesRegex(Exception, "^PDF failed Preflight checks.*") as cm:
profiles.Pdfa1a.check_preflight(filename)
expected_exception = ("PDF failed Preflight checks with the following Issues & exceptions:\n"
"ISSUES:\n"
"Rule 'OnlyEmbeddedFonts' found an error on page 1: "
"All fonts must be embedded; found non-embedded font.\n")
self.assertTrue(str(cm.exception).startswith(expected_exception))
def test_profile__pdfx1a(self):
filename = os.path.join(pdf_folder, "pdfx-1a.pdf")
self.assertEqual(None, profiles.Pdfx1a.check_preflight(filename))
filename = os.path.join(pdf_folder, "fails_pdfx.pdf")
with self.assertRaisesRegex(Exception, "^PDF failed Preflight checks.*") as cm:
profiles.Pdfx1a.check_preflight(filename)
expected_exception = ("PDF failed Preflight checks with the following Issues & exceptions:\n"
"ISSUES:\n"
"Rule 'InfoHasKeys' found an error in document metadata: "
"Info dict missing required key '/ModDate'\n"
"Rule 'InfoHasKeys' found an error in document metadata: "
"Info dict missing required key '/Title'\n"
"Rule 'InfoSpecifiesTrapping' found an error in document metadata: "
"Info dict missing required key '/Trapped'.\n"
"Rule 'MatchInfoEntries' found an error in document metadata: "
"Info dict missing required key '/GTS_PDFXConformance'\n"
"Rule 'MatchInfoEntries' found an error in document metadata: "
"Info dict missing required key '/GTS_PDFXVersion'\n"
"Rule 'MaxVersion' found an error in document metadata: "
"PDF version should be 1.3 or lower.\n"
"Rule 'NoRgb' found an error on page 1-100: "
"Found RGB colorspace; RGB objects are prohibited.\n"
"Rule 'NoTransparency' found an error on page 1-100: "
"Found object with transparency; transparent objects are prohibited.\n"
"Rule 'OutputIntentForPdfx' found an error in document metadata: "
"OutputIntent with subtype '/GTS_PDFX' is required but was not found.\n"
"Rule 'PdfxOutputIntentHasKeys' found an error in document metadata: "
"GTS_PDFX OutputIntent not found, assumed to be missing all required keys "
"'['/OutputConditionIdentifier', '/Info']'.\n"
"Rule 'PrintBoxes' found an error on page 1-100: "
"ArtBox or TrimBox is required, but neither was found; TrimBox is preferred.\n"
"Rule 'RootHasKeys' found an error in document metadata: "
"Root dict missing required key '/OutputIntents'\n")
self.assertTrue(str(cm.exception).startswith(expected_exception))
def test_profile__pdfx1a2003(self):
filename = os.path.join(pdf_folder, "pdfx-1a-2003.pdf")
self.assertEqual(None, profiles.Pdfx1a2003.check_preflight(filename))
filename = os.path.join(pdf_folder, "fails_pdfx.pdf")
with self.assertRaisesRegex(Exception, "^PDF failed Preflight checks.*") as cm:
profiles.Pdfx1a2003.check_preflight(filename)
expected_exception = ("PDF failed Preflight checks with the following Issues & exceptions:\n"
"ISSUES:\n"
"Rule 'InfoHasKeys' found an error in document metadata: "
"Info dict missing required key '/ModDate'\n"
"Rule 'InfoHasKeys' found an error in document metadata: "
"Info dict missing required key '/Title'\n"
"Rule 'InfoSpecifiesTrapping' found an error in document metadata: "
"Info dict missing required key '/Trapped'.\n"
"Rule 'MatchInfoEntries' found an error in document metadata: "
"Info dict missing required key '/GTS_PDFXVersion'\n"
"Rule 'MaxVersion' found an error in document metadata: "
"PDF version should be 1.4 or lower.\n"
"Rule 'NoRgb' found an error on page 1-100: "
"Found RGB colorspace; RGB objects are prohibited.\n"
"Rule 'NoTransparency' found an error on page 1-100: "
"Found object with transparency; transparent objects are prohibited.\n"
"Rule 'OutputIntentForPdfx' found an error in document metadata: "
"OutputIntent with subtype '/GTS_PDFX' is required but was not found.\n"
"Rule 'PdfxOutputIntentHasKeys' found an error in document metadata: "
"GTS_PDFX OutputIntent not found, assumed to be missing all required keys "
"'['/OutputConditionIdentifier', '/Info']'.\n"
"Rule 'PrintBoxes' found an error on page 1-100: "
"ArtBox or TrimBox is required, but neither was found; TrimBox is preferred.\n"
"Rule 'RootHasKeys' found an error in document metadata: "
"Root dict missing required key '/OutputIntents'\n")
self.assertTrue(str(cm.exception).startswith(expected_exception))
######################################################################
def test_rule__box_nesting(self):
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.BoxNesting.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "bleedbox_larger_than_mediabox.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.BoxNesting.check(pdf)
issue = issues[0]
self.assertEqual(1, issue.page)
self.assertEqual("BoxNesting", issue.rule)
self.assertEqual("BleedBox must be smaller than MediaBox", issue.desc)
def test_rule__compression_algorithms(self):
filename = os.path.join(pdf_folder, "jbig2.pdf")
with pikepdf.open(filename) as pdf:
allowed_algorithms = ["/FlateDecode", "/JBIG2Decode"]
issues = rules.CompressionAlgorithms.check(pdf, allowed_algorithms)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "jbig2.pdf")
with pikepdf.open(filename) as pdf:
allowed_algorithms = ["/FlateDecode"]
issues = rules.CompressionAlgorithms.check(pdf, allowed_algorithms)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("CompressionAlgorithms", issue.rule)
self.assertEqual("File uses unwanted compression algorithm: '/JBIG2Decode'", issue.desc)
def test_rule__document_id(self):
filename = os.path.join(pdf_folder, "pdfx-1a-subsetting.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.DocumentId.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "no_document_id.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.DocumentId.check(pdf)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("DocumentId", issue.rule)
self.assertEqual("Document ID missing.", issue.desc)
def test_rule__info_has_keys(self):
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
entries = ["/Creator", "/Producer"]
issues = rules.InfoHasKeys.check(pdf, entries)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
entries = ["/GTS_PDFXVersion"]
issues = rules.InfoHasKeys.check(pdf, entries)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("InfoHasKeys", issue.rule)
self.assertEqual("Info dict missing required key '/GTS_PDFXVersion'", issue.desc)
def test_rule__info_specifies_trapping(self):
filename = os.path.join(pdf_folder, "trapped_false.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.InfoSpecifiesTrapping.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "trapped_true.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.InfoSpecifiesTrapping.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "trapped_broken.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.InfoSpecifiesTrapping.check(pdf)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("InfoSpecifiesTrapping", issue.rule)
self.assertEqual("Value of Info entry '/Trapped' must be True or False.", issue.desc)
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.InfoSpecifiesTrapping.check(pdf)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("InfoSpecifiesTrapping", issue.rule)
self.assertEqual("Info dict missing required key '/Trapped'.", issue.desc)
def test_rule__match_info_entries(self):
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
entries = {"/Creator": r"Prawn"}
issues = rules.MatchInfoEntries.check(pdf, entries)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
entries = {"/GTS_PDFXVersion": "^PDF/X.*"}
issues = rules.MatchInfoEntries.check(pdf, entries)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("MatchInfoEntries", issue.rule)
self.assertEqual("Info dict missing required key '/GTS_PDFXVersion'", issue.desc)
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
entries = {"/Creator": r"Shrimp"}
issues = rules.MatchInfoEntries.check(pdf, entries)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("MatchInfoEntries", issue.rule)
self.assertEqual("Value of Info entry '/Creator' doesn't match regex 'Shrimp'", issue.desc)
def test_rule__max_version(self):
filename = os.path.join(pdf_folder, "version_1_3.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.MaxVersion.check(pdf, "1.3")
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "version_1_3.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.MaxVersion.check(pdf, "1.4")
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "version_1_4.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.MaxVersion.check(pdf, "1.4")
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "version_1_4.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.MaxVersion.check(pdf, "1.3")
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("MaxVersion", issue.rule)
self.assertEqual("PDF version should be 1.3 or lower.", issue.desc)
def test_rule__no_filespecs(self):
filename = os.path.join(pdf_folder, "rgb.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.NoFilespecs.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "filespec_to_external_file.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.NoFilespecs.check(pdf)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("NoFilespecs", issue.rule)
self.assertEqual("Found one or more filespecs; use of filespecs to reference external files is prohibited.",
issue.desc)
def test_rule__no_rgb(self):
filename = os.path.join(pdf_folder, "gray.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.NoRgb.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "rgb.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.NoRgb.check(pdf)
issue = issues[0]
self.assertEqual(1, issue.page)
self.assertEqual("NoRgb", issue.rule)
self.assertEqual("Found RGB colorspace; RGB objects are prohibited.",
issue.desc)
def test_rule__no_transparency(self):
filename = os.path.join(pdf_folder, "gray.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.NoTransparency.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "transparency.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.NoTransparency.check(pdf)
issue = issues[0]
self.assertEqual(1, issue.page)
self.assertEqual("NoTransparency", issue.rule)
self.assertEqual("Found object with transparency; transparent objects are prohibited.",
issue.desc)
def test_rule__only_embedded_fonts(self):
# pass a file with embedded fonts that don't have subsets
filename = os.path.join(pdf_folder, "pdfx-1a-no-subsetting.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.OnlyEmbeddedFonts.check(pdf)
self.assertEqual(None, issues)
# pass a file with embedded fonts that do have subsets
filename = os.path.join(pdf_folder, "pdfx-1a-subsetting.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.OnlyEmbeddedFonts.check(pdf)
self.assertEqual(None, issues)
# fail a file with a standard font that's not embedded
filename = os.path.join(pdf_folder, "standard_14_font.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.OnlyEmbeddedFonts.check(pdf)
issue = issues[0]
self.assertEqual(1, issue.page)
self.assertEqual("OnlyEmbeddedFonts", issue.rule)
self.assertEqual("All fonts must be embedded; found non-embedded font.", issue.desc)
def test_rule__output_intent_for_pdfx(self):
filename = os.path.join(pdf_folder, "pdfx-1a-subsetting.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.OutputIntentForPdfx.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "two_outputintents.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.OutputIntentForPdfx.check(pdf)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("OutputIntentForPdfx", issue.rule)
self.assertEqual("Exactly one OutputIntent with subtype '/GTS_PDFX' is required; found multiple.",
issue.desc)
filename = os.path.join(pdf_folder, "version_1_4.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.OutputIntentForPdfx.check(pdf)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("OutputIntentForPdfx", issue.rule)
self.assertEqual("OutputIntent with subtype '/GTS_PDFX' is required but was not found.", issue.desc)
def test_rule__pdfx_output_intent_has_keys(self):
filename = os.path.join(pdf_folder, "pdfx-1a-subsetting.pdf")
with pikepdf.open(filename) as pdf:
entries = ["/OutputConditionIdentifier", "/Info"]
issues = rules.PdfxOutputIntentHasKeys.check(pdf, entries)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "pdfx-1a-subsetting.pdf")
with pikepdf.open(filename) as pdf:
entries = ["/Cheese"]
issues = rules.PdfxOutputIntentHasKeys.check(pdf, entries)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("PdfxOutputIntentHasKeys", issue.rule)
self.assertEqual("GTS_PDFX OutputIntent missing required key '/Cheese'.",
issue.desc)
filename = os.path.join(pdf_folder, "version_1_4.pdf")
with pikepdf.open(filename) as pdf:
entries = ["/Info"]
issues = rules.PdfxOutputIntentHasKeys.check(pdf, entries)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("PdfxOutputIntentHasKeys", issue.rule)
self.assertEqual("GTS_PDFX OutputIntent not found, assumed to be missing all required keys '['/Info']'.",
issue.desc)
def test_rule__print_boxes(self):
filename = os.path.join(pdf_folder, "pdfx-1a-subsetting.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.PrintBoxes.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "inherited_page_attributes.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.PrintBoxes.check(pdf)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "artbox_and_trimbox.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.PrintBoxes.check(pdf,)
issue = issues[0]
self.assertEqual(1, issue.page)
self.assertEqual("PrintBoxes", issue.rule)
self.assertEqual("A page cannot have both ArtBox and TrimBox, but both were found; TrimBox is preferred",
issue.desc)
filename = os.path.join(pdf_folder, "no_artbox_or_trimbox.pdf")
with pikepdf.open(filename) as pdf:
issues = rules.PrintBoxes.check(pdf,)
issue = issues[0]
self.assertEqual(1, issue.page)
self.assertEqual("PrintBoxes", issue.rule)
self.assertEqual("ArtBox or TrimBox is required, but neither was found; TrimBox is preferred.", issue.desc)
def test_rule__root_has_keys(self):
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
entries = ["/Type"]
issues = rules.RootHasKeys.check(pdf, entries)
self.assertEqual(None, issues)
filename = os.path.join(pdf_folder, "72ppi.pdf")
with pikepdf.open(filename) as pdf:
entries = ["/OutputIntents"]
issues = rules.RootHasKeys.check(pdf, entries)
issue = issues[0]
self.assertEqual("Metadata", issue.page)
self.assertEqual("RootHasKeys", issue.rule)
self.assertEqual("Root dict missing required key '/OutputIntents'", issue.desc)
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from github_com.TheThingsNetwork.api.gateway import gateway_pb2 as github_dot_com_dot_TheThingsNetwork_dot_api_dot_gateway_dot_gateway__pb2
from github_com.TheThingsNetwork.api.router import router_pb2 as github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class RouterStub(object):
"""The Router service provides pure network functionality
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GatewayStatus = channel.stream_unary(
'/router.Router/GatewayStatus',
request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_gateway_dot_gateway__pb2.Status.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Uplink = channel.stream_unary(
'/router.Router/Uplink',
request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.UplinkMessage.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Subscribe = channel.unary_stream(
'/router.Router/Subscribe',
request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.SubscribeRequest.SerializeToString,
response_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.DownlinkMessage.FromString,
)
self.Activate = channel.unary_unary(
'/router.Router/Activate',
request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.DeviceActivationRequest.SerializeToString,
response_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.DeviceActivationResponse.FromString,
)
class RouterServicer(object):
"""The Router service provides pure network functionality
"""
def GatewayStatus(self, request_iterator, context):
"""Gateway streams status messages to Router
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Uplink(self, request_iterator, context):
"""Gateway streams uplink messages to Router
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Subscribe(self, request, context):
"""Gateway subscribes to downlink messages from Router
It is possible to open multiple subscriptions (but not recommended).
If you do this, you are responsible for de-duplication of downlink messages.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Activate(self, request, context):
"""Gateway requests device activation
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_RouterServicer_to_server(servicer, server):
rpc_method_handlers = {
'GatewayStatus': grpc.stream_unary_rpc_method_handler(
servicer.GatewayStatus,
request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_gateway_dot_gateway__pb2.Status.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'Uplink': grpc.stream_unary_rpc_method_handler(
servicer.Uplink,
request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.UplinkMessage.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'Subscribe': grpc.unary_stream_rpc_method_handler(
servicer.Subscribe,
request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.SubscribeRequest.FromString,
response_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.DownlinkMessage.SerializeToString,
),
'Activate': grpc.unary_unary_rpc_method_handler(
servicer.Activate,
request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.DeviceActivationRequest.FromString,
response_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.DeviceActivationResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'router.Router', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class RouterManagerStub(object):
"""The RouterManager service provides configuration and monitoring functionality
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GatewayStatus = channel.unary_unary(
'/router.RouterManager/GatewayStatus',
request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.GatewayStatusRequest.SerializeToString,
response_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.GatewayStatusResponse.FromString,
)
self.GetStatus = channel.unary_unary(
'/router.RouterManager/GetStatus',
request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.StatusRequest.SerializeToString,
response_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.Status.FromString,
)
class RouterManagerServicer(object):
"""The RouterManager service provides configuration and monitoring functionality
"""
def GatewayStatus(self, request, context):
"""Gateway owner or network operator requests Gateway status from Router Manager
Deprecated: Use monitor API (NOC) instead of this
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetStatus(self, request, context):
"""Network operator requests Router status
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_RouterManagerServicer_to_server(servicer, server):
rpc_method_handlers = {
'GatewayStatus': grpc.unary_unary_rpc_method_handler(
servicer.GatewayStatus,
request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.GatewayStatusRequest.FromString,
response_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.GatewayStatusResponse.SerializeToString,
),
'GetStatus': grpc.unary_unary_rpc_method_handler(
servicer.GetStatus,
request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.StatusRequest.FromString,
response_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_router_dot_router__pb2.Status.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'router.RouterManager', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
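# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the generated module): how the generated
# servicer could be wired into a gRPC server. The Router subclass, port, and
# executor size below are assumptions for demonstration only.
if __name__ == '__main__':
    import time
    from concurrent import futures

    class _ExampleRouter(RouterServicer):
        """Inherits the UNIMPLEMENTED stubs; a real router would override the RPC methods."""

    _server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_RouterServicer_to_server(_ExampleRouter(), _server)
    _server.add_insecure_port('[::]:50051')  # hypothetical port
    _server.start()
    try:
        while True:
            time.sleep(3600)
    except KeyboardInterrupt:
        _server.stop(0)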
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2020-04-07 20:22:19
# @Author : 崔立波 (<EMAIL>)
# @Link : http://blog.sina.com.cn/dejavu1
# @Version : 1
import requests
import time
from docx import Document
from docx.shared import Inches
from docx.shared import Pt
from docx.oxml.ns import qn
from docx.enum.text import WD_ALIGN_PARAGRAPH
from docx.enum.section import WD_SECTION
from docx.enum.style import WD_STYLE
from docx.enum.section import WD_ORIENT
import correct_text as c
###############################################
# Processing starts below
def doc():
    print("checkerror('<file path>'), output is written to D:\\")
def checkerror(path):
strslice=path.split("\\",-1)
for x in enumerate(strslice):
if x[1].find(".do")>0:
wenjianming=x[1]
mk=wenjianming.find(".docx")
wenjianming=wenjianming[0:mk]
document=Document(path)
for x in document.paragraphs:
# print(x.text)
if x.text==" ":
continue
if x.text==" ":
continue
if x.text=="\r\n":
continue
if x.text.strip()=="":
continue
try:
c.txt_correction(x.text)
except Exception as e:
            print("This paragraph could not be checked")
try:
document=Document(path)
# print(1)
except Exception as e:
document=Document()
    for paragraph in document.paragraphs:
        for run in paragraph.runs:
            if "%" in run.text:
                run.font.name = u'Times New Roman'
                print("Fixed %")
            if "." in run.text:
                run.font.name = u'Times New Roman'
                print("Fixed .")
            if "," in run.text:
                run.text = run.text.replace(",", ",")
                run.font.name = u'仿宋_GB2312'
                print("Fixed ,")
            if ")" in run.text:
                run.text = run.text.replace(")", ")")
                # run.font.name = u'仿宋_GB2312'
                print("Fixed )")
            if "(" in run.text:
                run.text = run.text.replace("(", "(")
                # run.font.name = u'仿宋_GB2312'
                print("Fixed (")
            if "[" in run.text:
                run.font.name = u'仿宋_GB2312'
                print("Fixed [")
            if "]" in run.text:
                run.font.name = u'仿宋_GB2312'
                print("Fixed ]")
pa="D:/"+wenjianming+time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))+".docx"
print(pa)
document.save("D:/"+wenjianming+time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))+".docx")
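# A minimal usage sketch (not in the original script; the commented path is an assumption):
if __name__ == "__main__":
    doc()  # prints the usage hint
    # checkerror(r"D:\report.docx")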
class BaseManager:
@classmethod
async def delete(cls, obj_id):
result = await cls.update_one(
{"$or": [{"activity.object.id": obj_id},
{"activity.id": obj_id}],
"deleted": False},
{'$set': {"deleted": True}}
)
return result.modified_count
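# Illustrative sketch (not from the original file): BaseManager assumes a subclass
# supplies an async `update_one`. One plausible way is a Motor-backed MongoDB
# collection; the connection URI, database, and collection names are assumptions.
try:
    from motor.motor_asyncio import AsyncIOMotorClient

    class ActivityManager(BaseManager):
        collection = AsyncIOMotorClient("mongodb://localhost:27017")["example_db"]["activities"]

        @classmethod
        async def update_one(cls, filter_doc, update_doc):
            # Delegate to the MongoDB collection; BaseManager.delete reads
            # `modified_count` off the returned result.
            return await cls.collection.update_one(filter_doc, update_doc)
except ImportError:
    # Motor is optional here; the sketch is for illustration only.
    pass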
# Repo: UltrosBot/Ultros3K, file: src/ultros/core/networks/__init__.py
# coding=utf-8
"""
Networks - TODO: Describe
Modules
=======
.. currentmodule:: ultros.core.networks
.. autosummary::
:toctree: networks
base
manager
"""
__author__ = "<NAME>"
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import s3direct.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Episode',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('title', models.CharField(default=b'', max_length=100, blank=True)),
('audio_file_path', s3direct.fields.S3DirectField()),
('thumbnail', s3direct.fields.S3DirectField()),
('description', models.TextField()),
],
options={
'ordering': ('created',),
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('name', models.CharField(max_length=50)),
('image_file', s3direct.fields.S3DirectField()),
],
options={
'ordering': ('created',),
},
),
migrations.AddField(
model_name='episode',
name='tags',
field=models.ManyToManyField(to='episodes.Tag', blank=True),
),
]
# File: UDPFileSender.py
import socket
import sys
#create file
def createFile():
with open("file_object.txt", 'w') as fileContent:
fileContent.write('This is just cray cray\n')
fileContent.close()
#read file
def readFile():
    with open('file_object.txt', 'r') as fileContent:
        # read() never returns None, and the with-block closes the file automatically
        content = fileContent.read()
    return content
#send the file data
def sendData(sock, dest):
data = readFile()
sock.sendto(bytes(data, 'UTF-8'), dest)
#setup socket
def setupUDPSocket():
#create udp socket
_socket = socket.socket(socket.AF_INET, #internet
socket.SOCK_DGRAM) # UDP
    #forcibly bind to port in use
_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return _socket
#main
if __name__=='__main__':
#destination address and port
package_destination = ("192.168.0.67", 4001)
#create udp socket
sock = setupUDPSocket()
#create a file to read
createFile()
#read the file
readFile()
#send file content over udp
sendData(sock, package_destination)
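# Illustrative counterpart (not part of the original script): a minimal UDP receiver
# that prints whatever sendData() transmits. The bind address, port, and buffer size
# are assumptions chosen to match package_destination above.
def receiveData(port=4001, bufsize=4096):
    recv_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    recv_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    recv_sock.bind(("", port))  # listen on all interfaces
    data, addr = recv_sock.recvfrom(bufsize)
    print("received from {}: {}".format(addr, data.decode('UTF-8')))
    recv_sock.close()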
result = 0
"""Sue 500: pomeranians: 10, cats: 3, vizslas: 5"""
sues = []
with open("input.txt", "r") as input:
for line in input:
line = line.strip().replace(":", "").replace(",", "")
data = line.split()
keys = [s for (i, s) in enumerate(data) if i % 2 == 0]
values = [int(s) for (i, s) in enumerate(data) if i % 2 == 1]
sues.append({k: v for (k, v) in zip(keys, values)})
presentInfo = {
"children": 3,
"cats": 7,
"samoyeds": 2,
"pomeranians": 3,
"akitas": 0,
"vizslas": 0,
"goldfish": 5,
"trees": 3,
"cars": 2,
"perfumes": 1
}
gtarray = ["cats", "trees"]
ltarray = ["pomeranians", "goldfish"]
def checkSues():
for sue in sues:
flag = True
for info in presentInfo:
if info in sue:
if info in gtarray and sue[info] <= presentInfo[info]:
flag = False
elif info in ltarray and sue[info] >= presentInfo[info]:
flag = False
elif info not in gtarray + ltarray and sue[info] != presentInfo[info]:
flag = False
if flag == True:
return sue["Sue"]
result = checkSues()
with open("output2.txt", "w") as output:
output.write(str(result))
print(str(result))
# Repo: Oscar-Oliveira/Python3
"""
Arithmetic
"""
a = 2 + 2
print("a = 2 + 2 = {}".format(a))
a = a - 2
print("a - 2 = {}".format(a))
a = a * 2
print("a = a * 2 = {}".format(a))
a = a / 2
print("a = a / 2 = {}".format(a)) # The result of division is always a float
print()
a = a ** 2 # Exponentiation (x**y, x to the power of y)
print("a = a ** 2 = {}".format(a))
print()
# // integer division, divides and rounds DOWN to the nearest whole number
b = a // 3
print("b = a // 3 = {}".format(b))
b = -a // 3
print("b = -a // 3 = {}".format(b))
print()
b = (a * 3) // 2.5
print("b = (a * 3) // 2.5 = {}".format(b))
c = (a * 3) % 2.5 # Modulo
print("c = (a * 3) % 2.5 = {}".format(c))
c = (b * 2.5) + c
print("c = (b * 2.5) + c = {}".format(c))
print()
a += 2 # variable = variable operator value, a = a + 2
print("a += 2 (a={})".format(a))
a -= 2 # a = a - 2
print("a -= 2 (a={})".format(a))
a *= 2
print("a *= 2 (a={})".format(a))
a /= 2
print("a /= 2 (a={})".format(a))
a **= 2
print("a **= 2 (a={})".format(a))
a //= 2
print("a //= 2 (a={})".format(a))
a %= 2
print("a %= 2 (a={})".format(a))
# File: 2018/aoc2018_5b.py (gh_stars: 1-10)
# Advent Of Code 2018, day 5, part 2
# http://adventofcode.com/2018/day/5
# solution by ByteCommander, 2018-12-05
from collections import deque
from string import ascii_lowercase
with open("inputs/aoc2018_5.txt") as file:
whole_molecule = file.read().strip()
shortest = len(whole_molecule)
for blocker in ascii_lowercase:
molecule = deque(whole_molecule.replace(blocker, "").replace(blocker.upper(), ""))
result = []
while molecule:
if not result:
result.append(molecule.popleft())
        unit = molecule.popleft() if molecule else ""
        # guard against appending the empty sentinel when the molecule ran out
        if unit and unit.swapcase() == result[-1]:
            result.pop()
        elif unit:
            result.append(unit)
shortest = min(shortest, len(result))
print(f"The shortest fully reacted molecule after removing one type is {shortest} units long.")
def find_license_gitlab():
""" """
# gh_stars: 10-100
"""banning support
Revision ID: 804005e79950
Revises: <KEY>
Create Date: 2019-08-16 21:34:36.679754
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '804005e79950'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('banned', sa.Boolean(), nullable=False, server_default='0'))
batch_op.add_column(sa.Column('ip_address', sa.String(length=100), nullable=False, server_default=''))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.drop_column('ip_address')
batch_op.drop_column('banned')
# ### end Alembic commands ###
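# Usage sketch (illustrative, assuming a standard Alembic setup with an alembic.ini
# in the working directory): the revision can also be applied from Python.
# from alembic import command
# from alembic.config import Config
# command.upgrade(Config("alembic.ini"), "804005e79950")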
# Repo: Osmose/olympia
from django.conf.urls import include, url
from django.shortcuts import redirect
from olympia.amo.urlresolvers import reverse
from olympia.browse.feeds import (
ExtensionCategoriesRss, FeaturedRss, SearchToolsRss, ThemeCategoriesRss)
from . import views
impala_patterns = [
# TODO: Impalacize these views.
url('^extensions/(?P<category>[^/]+)/featured$',
views.legacy_creatured_redirect,
name='i_browse.creatured'),
url('^language-tools/(?P<category>[^/]+)?$', views.language_tools,
name='i_browse.language-tools'),
url('^search-tools/(?P<category>[^/]+)?$', views.search_tools,
name='i_browse.search-tools'),
]
urlpatterns = [
url('^i/', include(impala_patterns)),
url('^language-tools/(?P<category>[^/]+)?$', views.language_tools,
name='browse.language-tools'),
url('^dictionaries$',
lambda r: redirect(reverse('browse.language-tools'), permanent=True)),
url('^featured$',
lambda r: redirect(reverse('browse.extensions') + '?sort=featured',
permanent=True)),
# Full Themes are now Complete Themes.
url('^full-themes/(?P<category>[^ /]+)?$',
views.legacy_fulltheme_redirects),
# Personas are now Themes.
url('^personas/(?P<category>[^ /]+)?$',
views.legacy_theme_redirects),
# TODO (percona): Rename this to `browse.themes`.
url('^themes/(?P<category>[^ /]+)?$', views.personas,
name='browse.personas'),
# Themes are now Complete Themes.
url('^themes/(?P<category_name>[^/]+)/format:rss$',
views.legacy_theme_redirects),
# TODO (percona): Rename this to `browse.complete-themes`.
url('^complete-themes/(?P<category>[^/]+)?$', views.themes,
name='browse.themes'),
url('^complete-themes/(?:(?P<category_name>[^/]+)/)?format:rss$',
ThemeCategoriesRss(), name='browse.themes.rss'),
url('^extensions/(?:(?P<category>[^/]+)/)?$', views.extensions,
name='browse.extensions'),
# Creatured URLs now redirect to browse.extensions
url('^extensions/(?P<category>[^/]+)/featured$',
views.legacy_creatured_redirect),
url('^extensions/(?:(?P<category_name>[^/]+)/)?format:rss$',
ExtensionCategoriesRss(), name='browse.extensions.rss'),
url('^browse/type:7$',
lambda r: redirect("https://www.mozilla.org/plugincheck/",
permanent=True)),
url('^browse/type:(?P<type_>\d)(?:/cat:(?P<category>\d+))?'
'(?:/sort:(?P<sort>[^/]+))?(?:/format:(?P<format>[^/]+).*)?',
views.legacy_redirects),
url('^search-tools/(?:(?P<category>[^/]+)/)?format:rss$',
SearchToolsRss(), name='browse.search-tools.rss'),
url('^search-tools/(?P<category>[^/]+)?$', views.search_tools,
name='browse.search-tools'),
url('^featured/format:rss$', FeaturedRss(), name='browse.featured.rss'),
# The plugins page was moved to mozilla.org and so now it is just a
# redirect, per bug 775799.
url('^plugins$',
lambda r: redirect('http://www.mozilla.org/en-US/plugincheck/',
permanent=True)),
]
# gh_stars: 0
# flake8: noqa
import rastervision.pipeline
from rastervision.core.box import *
from rastervision.core.data_sample import *
from rastervision.core.predictor import *
from rastervision.core.raster_stats import *
# We just need to import anything that contains a Config, so that all
# the register_config decorators will be called which add Configs to the
# registry.
import rastervision.core.backend
import rastervision.core.data
import rastervision.core.rv_pipeline
import rastervision.core.evaluation
def register_plugin(registry):
registry.set_plugin_version('rastervision.core', 3)
registry.set_plugin_aliases('rastervision.core', ['rastervision2.core'])
from rastervision.core.cli import predict
registry.add_plugin_command(predict)
# File: dev-test/Daniel_version_of_code/3D_Animation_Daniel.py
"""
@author: Daniel
"""
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D #imports the required part to add the 3rd axis for the plot to become 3D
import matplotlib.animation as animation
#####################################Initial Conditions
p0=[0,0,0] #Initial position
v0=[1,1,1] #Initial velocity
B=[0,0,1] #Initial magnetic field
E=[0,0,0] #Initial electric field
q=1 #Charge
m=1 #mass
dt=0.1 #time interval
stop=100
steps=int(stop/dt)
###################################################
p=sp.zeros((steps,3))######Initial empty arrays
v=sp.zeros((steps,3))
v[0]=v0###Defining zeroth entry
p[0]=p0
##############################################################################
def cross(v1): #Cross product function taking velocity and magnetic field
return (q/m)*(E+sp.cross(v1,B))
##############################################################################
for i in range(0,steps-1): #4th Order Runge-Kutta
p1=dt*v[i]
v1=dt*cross(v[i])
p2=dt*(v[i]+0.5*v1)
v2=dt*cross(v[i]+0.5*v1)
p3=dt*(v[i]+0.5*v2)
v3=dt*cross(v[i]+0.5*v2)
p4 = dt * (v[i] + v3)
v4 = dt * cross(v[i] + v3)
p[i+1]=p[i] + (p1 +2*p2 +2*p3 +p4)*(1/6)
v[i+1]=v[i] + (v1 + 2*v2+2*v3+v4)*(1/6)
##############################################################################
fig = plt.figure()
FRAMES=100 #Number of frames
ax = Axes3D(fig)
##############################################################################
#ax.plot3D(p[:,0], p[:,1], p[:,2]) #Plots the stationary 3D curve
##############################################################################
def func(i):
    # advance steps/FRAMES points per frame so the full trajectory is drawn by the final frame
    current_index = int(steps / FRAMES * i)
    ax.plot3D(p[:current_index, 0], p[:current_index, 1], p[:current_index, 2], color='g') #Function which is iterated and creates the plot
ax.set_xlim3d([sp.amin(p[:,0]),sp.amax(p[:,0])]) #######Sets limits to the axis so that we can always see the whole plot
ax.set_xlabel('X')
ax.set_ylim3d([sp.amin(p[:,1]),sp.amax(p[:,1])])
ax.set_ylabel('Y')
ax.set_zlim3d([sp.amin(p[:,2]),sp.amax(p[:,2])])
ax.set_zlabel('Z')
ax.set_title('3D Test')
anim = animation.FuncAnimation(fig, func,
frames=FRAMES, interval=100) #This creates the animation
plt.show()
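# Optional (illustrative only): to keep the animation instead of just displaying it,
# it could be written to disk; this assumes ffmpeg (or pillow for a GIF) is installed.
# anim.save('trajectory.mp4', writer='ffmpeg', fps=10)
# anim.save('trajectory.gif', writer='pillow', fps=10)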
#
# Copyright 2013 Intel Corp.
# Copyright 2014 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fnmatch
import os
import pkg_resources
from oslo_log import log
import yaml
LOG = log.getLogger(__name__)
class ConfigException(Exception):
def __init__(self, cfg_type, message, cfg):
self.cfg_type = cfg_type
self.msg = message
self.cfg = cfg
def __str__(self):
return '%s %s: %s' % (self.cfg_type, self.cfg, self.msg)
class SourceException(Exception):
def __init__(self, message, cfg):
self.msg = message
self.cfg = cfg
def __str__(self):
return 'Source definition invalid: %s (%s)' % (self.msg, self.cfg)
class ConfigManagerBase(object):
"""Base class for managing configuration file refresh"""
def __init__(self, conf):
self.conf = conf
def load_config(self, cfg_file):
"""Load a configuration file and set its refresh values."""
if os.path.exists(cfg_file):
cfg_loc = cfg_file
else:
cfg_loc = self.conf.find_file(cfg_file)
if not cfg_loc:
LOG.debug("No pipeline definitions configuration file found! "
"Using default config.")
cfg_loc = pkg_resources.resource_filename(
__name__, 'pipeline/data/' + cfg_file)
with open(cfg_loc) as fap:
conf = yaml.safe_load(fap)
LOG.debug("Config file: %s", conf)
return conf
class Source(object):
"""Represents a generic source"""
def __init__(self, cfg):
self.cfg = cfg
try:
self.name = cfg['name']
except KeyError as err:
raise SourceException(
"Required field %s not specified" % err.args[0], cfg)
def __str__(self):
return self.name
    def check_source_filtering(self, data, d_type):
        """Source data rules checking

        - At least one meaningful datapoint must exist
        - Included and excluded types can't co-exist in the same pipeline
        - An included meter and a wildcard can't co-exist in the same pipeline
        """
if not data:
raise SourceException('No %s specified' % d_type, self.cfg)
if (any(x for x in data if x[0] not in '!*') and
any(x for x in data if x[0] == '!')):
raise SourceException(
'Both included and excluded %s specified' % d_type,
self.cfg)
if '*' in data and any(x for x in data if x[0] not in '!*'):
raise SourceException(
'Included %s specified with wildcard' % d_type,
self.cfg)
@staticmethod
def is_supported(dataset, data_name):
# Support wildcard like storage.* and !disk.*
# Start with negation, we consider that the order is deny, allow
if any(fnmatch.fnmatch(data_name, datapoint[1:])
for datapoint in dataset if datapoint[0] == '!'):
return False
if any(fnmatch.fnmatch(data_name, datapoint)
for datapoint in dataset if datapoint[0] != '!'):
return True
# if we only have negation, we suppose the default is allow
return all(datapoint.startswith('!') for datapoint in dataset)
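# Illustrative example (not from the original module): how the deny-then-allow
# wildcard matching in Source.is_supported behaves for a hypothetical meter list.
if __name__ == '__main__':
    _meters = ['storage.*', '!storage.objects.outgoing.bytes']
    print(Source.is_supported(_meters, 'storage.objects.incoming.bytes'))  # True: allowed by 'storage.*'
    print(Source.is_supported(_meters, 'storage.objects.outgoing.bytes'))  # False: denied by the '!' entry
    print(Source.is_supported(_meters, 'cpu'))                             # False: an allow list exists and 'cpu' is not on it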
# Repo: vtta2008/pipelineTool
# -*- coding: utf-8 -*-
"""
Script Name: FooterCheckBoxes.py
Author: <NAME>/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
from pyPLM.Widgets import GroupGrid
class FooterCheckBoxes(GroupGrid):
key = 'FooterCheckBoxes'
def __init__(self, title, parent=None):
super(FooterCheckBoxes, self).__init__(title, parent)
self.parent = parent
# -------------------------------------------------------------------------------------------------------------
# Created by panda on 2/12/2019 - 7:12 AM
# © 2017 - 2018 DAMGteam. All rights reserved
# Repo: JulieRossi/drf-friendly-errors
from django.template.defaultfilters import title
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework_friendly_errors.mixins import FriendlyErrorMessagesMixin
from tests.models import LANGUAGE_CHOICES, Snippet
def is_proper_title(value):
if value and value != title(value):
raise ValidationError('Incorrect title')
class SnippetSerializer(FriendlyErrorMessagesMixin, serializers.Serializer):
pk = serializers.IntegerField(read_only=True)
title = serializers.CharField(max_length=10, validators=[is_proper_title])
comment = serializers.CharField(max_length=255)
code = serializers.CharField(style={'base_template': 'textarea.html'})
linenos = serializers.BooleanField(required=False)
language = serializers.ChoiceField(choices=LANGUAGE_CHOICES)
rating = serializers.DecimalField(max_digits=3, decimal_places=1)
posted_date = serializers.DateTimeField()
def validate_comment(self, value):
if value[0] != value[0].upper():
raise ValidationError('First letter must be an uppercase')
return value
def validate(self, attrs):
# if phrase python is in title, language must be python as well
language = attrs.get('language')
title = attrs.get('title')
if 'python' in title.lower() and language != 'python':
raise ValidationError('Must be a python language')
return attrs
FIELD_VALIDATION_ERRORS = {'validate_comment': 5000,
'is_proper_title': 5001}
NON_FIELD_ERRORS = {'Must be a python language': 8000}
class AnotherSnippetSerializer(FriendlyErrorMessagesMixin,
serializers.Serializer):
"""
Mirror snippet for test register error mixin method
"""
pk = serializers.IntegerField(read_only=True)
title = serializers.CharField(max_length=10, validators=[is_proper_title])
comment = serializers.CharField(max_length=255)
code = serializers.CharField(style={'base_template': 'textarea.html'})
linenos = serializers.BooleanField(required=False)
language = serializers.ChoiceField(choices=LANGUAGE_CHOICES)
rating = serializers.DecimalField(max_digits=3, decimal_places=1)
posted_date = serializers.DateTimeField()
def validate_comment(self, value):
if value[0] != value[0].upper():
raise ValidationError('First letter must be an uppercase')
return value
def validate(self, attrs):
# if phrase python is in title, language must be python as well
language = attrs.get('language')
title = attrs.get('title')
if 'python' in title.lower() and language != 'python':
self.register_error(error_message='Python, fool!',
error_key='invalid_choice',
field_name='language')
return attrs
FIELD_VALIDATION_ERRORS = {'validate_comment': 5000,
'is_proper_title': 5001}
NON_FIELD_ERRORS = {'Must be a python language': 8000}
class SnippetModelSerializer(FriendlyErrorMessagesMixin,
serializers.ModelSerializer):
class Meta:
model = Snippet
def validate_comment(self, value):
if value[0] != value[0].upper():
raise ValidationError('First letter must be an uppercase')
return value
def validate(self, attrs):
# if phrase python is in title, language must be python as well
language = attrs.get('language')
title = attrs.get('title')
if 'python' in title.lower() and language != 'python':
raise ValidationError('Must be a python language')
return attrs
FIELD_VALIDATION_ERRORS = {'validate_comment': 5000,
'is_proper_title': 5001}
NON_FIELD_ERRORS = {'Must be a python language': 8000}
class AnotherSnippetModelSerializer(FriendlyErrorMessagesMixin,
serializers.ModelSerializer):
class Meta:
model = Snippet
def validate_comment(self, value):
if value[0] != value[0].upper():
raise ValidationError('First letter must be an uppercase')
return value
def validate(self, attrs):
# if phrase python is in title, language must be python as well
language = attrs.get('language')
title = attrs.get('title')
if 'python' in title.lower() and language != 'python':
self.register_error(error_message='Python, fool!',
error_key='invalid_choice',
field_name='language')
return attrs
FIELD_VALIDATION_ERRORS = {'validate_comment': 5000,
'is_proper_title': 5001}
NON_FIELD_ERRORS = {'Must be a python language': 8000}
class ThirdSnippetSerializer(
FriendlyErrorMessagesMixin,
serializers.ModelSerializer
):
class Meta:
model = Snippet
def validate_comment(self, value):
if value[0] != value[0].upper():
self.register_error(
'First letter must be an uppercase', field_name='comment',
error_key='blank'
)
return value
# Repo: simflin/tf-explorer, file: tf-explorer.py
#!/usr/bin/env python
import cmd
import getopt
import numpy as np
import os
import sys
class Node:
def __init__(self, parent, name, is_terminal):
self.parent = parent or self
self.name = name
self.children = []
self.is_terminal = is_terminal
def child(self, name):
for n in self.children:
if n.name == name:
return n
return None
@property
def _root(self):
while self.parent != self:
self = self.parent
return self
def insert(self, name, is_terminal):
node = self
components = name.split('/')
if len(name) > 0 and name[0] == '/':
node = node._root
for elem in components:
if elem == '' or elem == '.':
pass
elif elem == '..':
node = node.parent
else:
n = node.child(elem)
if n is None:
# Interior nodes are non-terminal
n = Node(node, elem, is_terminal=False)
node.children.append(n)
node = n
node.is_terminal = is_terminal
return node
def find(self, name):
node = self
components = name.split('/')
if len(name) > 0 and name[0] == '/':
node = node._root
for elem in components:
if elem == '' or elem == '.':
pass
elif elem == '..':
node = node.parent
else:
node = node.child(elem)
if node is None:
return None
return node
def find_terminal_nodes(self):
all_terminal_nodes = []
to_visit = [self]
while len(to_visit) > 0:
node = to_visit.pop(0)
if node.is_terminal:
all_terminal_nodes.append(node)
to_visit += node.children
return all_terminal_nodes
def move(self, src, dest):
def find_terminal_names(node):
ret = []
def recurse(node):
if node.is_terminal:
ret.append(node.full_name)
for child in node.children:
recurse(child)
recurse(node)
return ret
original_names = find_terminal_names(src)
# Remove last path component to compute `dest_dir`
components = dest.split('/')
dest_dir = '/'.join(components[:-1])
dest_dir = self.insert(dest_dir, is_terminal=False)
if components[-1] != '':
if dest_dir.find(components[-1]):
return None
src.name = components[-1]
# Unlink child from parent
src.parent.children.remove(src)
# Clean up dangling non-terminal nodes created by removing last child
parent = src.parent
while not parent.is_terminal and not parent.is_directory:
parent.parent.children.remove(parent)
parent = parent.parent
# Finish rest of relinking source to destination
src.parent = None
dest_dir.children.append(src)
src.parent = dest_dir
final_names = find_terminal_names(src)
return list(zip(original_names, final_names))
@property
def full_name(self):
node = self
elems = []
while node != node.parent:
elems = [node.name] + elems
node = node.parent
return '/'.join(elems)
@property
def is_directory(self):
return len(self.children) > 0
class ExplorerShell(cmd.Cmd):
intro = 'Type help or ? to list commands.\n'
prompt = '> '
file = None
# Hack to handle CTRL+C. Shamelessly stolen from StackOverflow:
# https://stackoverflow.com/questions/8813291/better-handling-of-keyboardinterrupt-in-cmd-cmd-command-line-interpreter
def cmdloop(self, intro=None):
print(self.intro)
while True:
try:
super(ExplorerShell, self).cmdloop(intro='')
break
except KeyboardInterrupt:
print('^C')
def default(self, line):
print('{}: unknown command.'.format(line.split()[0]))
def __init__(self, checkpoint_dir):
super(ExplorerShell, self).__init__()
checkpoint_filename = checkpoint_dir
if tf.gfile.IsDirectory(checkpoint_dir):
checkpoint_state = tf.train.get_checkpoint_state(checkpoint_dir)
if checkpoint_state is None:
print('Checkpoint file not found in {}.'.format(checkpoint_dir))
sys.exit(1)
checkpoint_filename = checkpoint_state.model_checkpoint_path
try:
tf.train.load_checkpoint(checkpoint_filename)
except ValueError:
print('Unable to load checkpoint from {}.'.format(checkpoint_filename))
sys.exit(1)
print('Checkpoint loaded from {}.'.format(checkpoint_filename))
self._checkpoint = checkpoint_filename
self._root = self._build_tree(self._all_vars())
self._cwd = self._root
self._prevwd = '/'
self._renames = {}
self._loads = {}
def help_ls(self):
print('ls - lists directory contents.')
print('Syntax: ls [PATH]')
def complete_ls(self, text, line, begidx, endidx):
args = line.split()
if len(args) > 2:
return []
text = args[-1] if len(args) == 2 else ''
path = text.split('/')
if len(path) == 0:
node = self._cwd
filt = ''
else:
filt = path[-1]
path[-1] = ''
node = self._cwd.find('/'.join(path))
return [n.name + ('/' if n.is_directory else '') for n in node.children if n.name.startswith(filt)]
def do_ls(self, arg):
arg = arg.split()
if len(arg) == 1:
target = self._cwd.find(arg[0])
if target is None:
print('{}: not found.'.format(arg[0]))
return
if not target.is_directory:
print('/{}'.format(target.full_name))
return
elif len(arg) > 1:
print('ls only supports a single optional argument.')
return
else:
target = self._cwd
for child in sorted(target.children, key=lambda x: x.name):
name = child.name
if not child.is_terminal:
name = '@' + name
if child.is_directory:
name += '/'
print(name)
def help_tensors(self):
print('tensors - lists all tensors in checkpoint.')
print('Syntax: tensors')
def do_tensors(self, arg):
for name, shape in tf.contrib.framework.list_variables(self._checkpoint):
print('{} : [{}]'.format(name, ', '.join(map(str, shape))))
def help_cd(self):
print('cd - change directory.')
print('Syntax: cd DIR')
def complete_cd(self, text, line, begidx, endidx):
args = line.split()
if len(args) > 2:
return []
text = args[-1] if len(args) == 2 else ''
path = text.split('/')
if len(path) == 0:
node = self._cwd
filt = ''
else:
filt = path[-1]
path[-1] = ''
node = self._cwd.find('/'.join(path))
return [n.name + '/' for n in node.children if n.is_directory and n.name.startswith(filt)]
def do_cd(self, arg):
arg = arg.split()
if len(arg) != 1:
print('cd: invalid usage.')
return
if arg[0] == '-':
arg[0] = self._prevwd
target = self._cwd.find(arg[0])
if target is None:
print('{}: not found.'.format(arg[0]))
elif not target.is_directory:
print('{}: not a directory'.format(arg[0]))
elif self._cwd != target:
self._prevwd = '/' + self._cwd.full_name
self._cwd = target
def help_pwd(self):
print('pwd - print working directory.')
print('Syntax: pwd')
def do_pwd(self, arg):
print('/{}'.format(self._cwd.full_name))
def help_shape(self):
print('shape - print shape of tensor to console.')
print('Syntax: shape TENSOR')
def complete_shape(self, text, line, begidx, endidx):
return self.complete_cat(text, line, begidx, endidx)
def do_shape(self, arg):
arg = arg.split()
if len(arg) != 1:
print('shape: invalid usage.')
return
target = self._cwd.find(arg[0])
if target is None:
print('{}: not found.'.format(arg[0]))
elif not target.is_terminal:
print('{}: not a tensor.'.format(arg[0]))
else:
# If the tensor was renamed but not committed, find the original name so we can look it up
# in the checkpoint file.
name = target.full_name
if name in self._renames:
name = self._renames[name]
print(list(tf.contrib.framework.load_variable(self._checkpoint, name).shape))
def help_parameters(self):
print('parameters - print the number of training parameters under a scope.')
print('Syntax: parameters [PATH]')
print('Note: the parameter count excludes `Adam` optimizer variables.')
def complete_parameters(self, text, line, begidx, endidx):
return self.complete_cat(text, line, begidx, endidx)
def do_parameters(self, arg):
arg = arg.split()
        if len(arg) > 1:
            print('parameters: invalid usage.')
            return
if len(arg) == 0:
target = self._cwd
else:
target = self._cwd.find(arg[0])
if target is None:
print('{}: not found.'.format(arg[0]))
return
target_names = [node.full_name for node in target.find_terminal_nodes()]
reader = tf.train.load_checkpoint(self._checkpoint)
var_shape_map = reader.get_variable_to_shape_map()
count = 0
for name in var_shape_map:
if 'Adam' not in name and name in target_names:
count += int(np.prod(var_shape_map[name]))
print('{:,} parameters.'.format(count))
def help_cat(self):
print('cat - print tensor to console.')
print('Syntax: cat TENSOR')
def complete_cat(self, text, line, begidx, endidx):
args = line.split()
if len(args) > 2:
return []
text = args[-1] if len(args) == 2 else ''
path = text.split('/')
if len(path) == 0:
node = self._cwd
filt = ''
else:
filt = path[-1]
path[-1] = ''
node = self._cwd.find('/'.join(path))
return [n.name + ('/' if n.is_directory else '') for n in node.children if n.name.startswith(filt)]
def do_cat(self, arg):
arg = arg.split()
if len(arg) != 1:
print('cat: invalid usage.')
return
target = self._cwd.find(arg[0])
if target is None:
print('{}: not found.'.format(arg[0]))
elif not target.is_terminal:
print('{}: not a tensor.'.format(arg[0]))
else:
# If the tensor was renamed but not committed, find the original name so we can look it up
# in the checkpoint file.
name = target.full_name
if name in self._loads:
value = self._loads[name]
# If numpy byte string, convert to a decoded Python string for printing.
if value.dtype.kind == 'S':
value = value.tostring().decode()
else:
if name in self._renames:
name = self._renames[name]
value = tf.contrib.framework.load_variable(self._checkpoint, name)
if isinstance(value, bytes):
print(value.decode())
else:
print(value)
def help_save(self):
print('save - save tensor to disk as numpy array.')
print('Syntax: save TENSOR FILENAME')
def do_save(self, arg):
arg = arg.split()
if len(arg) != 2:
print('save: invalid usage.')
return
target = self._cwd.find(arg[0])
if target is None:
print('{}: not found.'.format(arg[0]))
elif not target.is_terminal:
print('{}: not a tensor.'.format(arg[0]))
else:
# If the tensor was renamed but not committed, find the original name so we can look it up
# in the checkpoint file.
name = target.full_name
if name in self._renames:
name = self._renames[name]
tensor = tf.contrib.framework.load_variable(self._checkpoint, name)
try:
np.save(arg[1], tensor, allow_pickle=False)
except Exception as e:
print(str(e))
def help_load(self):
print('load - loads a numpy tensor from disk into the current checkpoint.')
print('Syntax: load TENSOR FILENAME')
print('Note: the operation is performed in-memory. To write changes back to the checkpoint, run `commit` after `load`.')
def do_load(self, arg):
arg = arg.split()
if len(arg) != 2:
print('load: invalid usage.')
return
try:
value = np.load(arg[1], allow_pickle=False)
except Exception as e:
print(str(e))
return
target = self._cwd.find(arg[0])
if target is None:
target = self._cwd.insert(arg[0], is_terminal=True)
elif not target.is_terminal:
target.is_terminal = True
name = target.full_name
self._loads[name] = value
def help_mv(self):
print('mv - move/rename tensor or directory.')
print('Syntax: mv SRC DEST')
print('Note: the operation is performed in-memory. To write changes back to the checkpoint, run `commit` after `mv`.')
def _add_or_update(self, old_name, new_name):
for k, v in self._renames.items():
if v == old_name:
self._renames[k] = new_name
return
self._renames[old_name] = new_name
def do_mv(self, arg):
arg = arg.split()
if len(arg) != 2:
print('mv: invalid usage.')
return
src, dest = arg
src = self._cwd.find(src)
if src is None:
print('{}: invalid source.'.format(arg[0]))
return
mutations = self._cwd.move(src, dest)
if mutations is not None:
# Rename pending loads if needed.
for old_name, new_name in mutations:
self._add_or_update(old_name, new_name)
if old_name in self._loads:
self._loads[new_name] = self._loads.pop(old_name)
else:
print('mv: cannot relink {} to {}'.format(arg[0], arg[1]))
def help_mutations(self):
print('mutations - list all in-memory move/rename/load operations that have not been written to disk yet.')
print('Syntax: mutations')
def do_mutations(self, arg):
for src, dest in sorted(self._renames.items()):
print('[Rename] {} -> {}'.format(src, dest))
for key, _ in sorted(self._loads.items()):
print('[Load] {}'.format(key))
def help_commit(self):
print('commit - writes all pending mutations to the checkpoint.')
print('Syntax: commit')
def do_commit(self, arg):
'''Commits all pending changes to the checkpoint.'''
if not self._dirty:
print('Nothing to commit.')
return
def commit(replacements, loads):
tf.reset_default_graph()
with tf.Session() as session:
for name in self._all_vars():
if name in loads:
var = loads[name]
loads.pop(name)
else:
var = tf.contrib.framework.load_variable(self._checkpoint, name)
if name in replacements:
name = replacements[name]
var = tf.Variable(var, name=name)
# Add new variables to checkpoint if they didn't exist before.
for name, value in loads.items():
var = tf.Variable(value, name=name)
session.run(tf.global_variables_initializer())
tf.train.Saver().save(session, self._checkpoint)
commit(self._renames, self._loads)
self._renames = {}
self._loads = {}
def help_exit(self):
print('exit - exits the shell.')
print('Syntax: exit')
def do_exit(self, arg):
        return self.do_EOF(arg)
def help_EOF(self):
print('^D - exits the shell.')
print('Syntax: ^D')
def do_EOF(self, arg):
print('exit')
if self._dirty:
print('WARNING: there are pending mutations that have not been written to disk. Discard (y/N)? ', end='', flush=True)
line = sys.stdin.readline().strip().lower()
if line == 'y' or line == 'yes':
print('')
return True
else:
print('You can view the pending mutations with the `mutations` command or write them out to disk with `commit`.')
return
print()
return True
def _all_vars(self):
return [var_name for var_name, _ in tf.contrib.framework.list_variables(self._checkpoint)]
def _build_tree(self, names):
root = Node(None, '', is_terminal=False)
for name in names:
root.insert(name, is_terminal=True)
return root
@property
def _dirty(self):
return len(self._renames) > 0 or len(self._loads) > 0
if __name__ == '__main__':
if len(sys.argv) != 2:
print('Usage: {} CHECKPOINT'.format(sys.argv[0]))
sys.exit(-1)
# Ugh.
import tensorflow as tf
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '4'
tf.logging.set_verbosity(tf.logging.ERROR)
ExplorerShell(sys.argv[1]).cmdloop()
# -*- coding: utf-8 -*-
from scrapy import Spider, Request
from ..items import ChannelItem, RoomItem
import json
class BilibiliSpider(Spider):
name = 'bilibili'
allowed_domains = ['bilibili.com']
start_urls = [
'http://live.bilibili.com/area/live'
]
custom_settings = {
'SITE': {
'code': 'bilibili',
'name': '哔哩哔哩',
'description': '哔哩哔哩-关注ACG直播互动平台',
'url': 'http://live.bilibili.com',
'image': 'http://static.hdslb.com/live-static/common/images/logo/logo-150-cyan.png',
'show_seq': 3
}
}
def parse(self, response):
panel_class = ['live-top-nav-panel', 'live-top-hover-panel']
panel_xpath = ['contains(@class, "{}")'.format(pclass) for pclass in panel_class]
room_query_list = []
for a_element in response.xpath('//div[{}]/a'.format(' and '.join(panel_xpath)))[1:-2]:
div_element = a_element.xpath('div[@class="nav-item"]')[0]
url = a_element.xpath('@href').extract_first()
if '/pages/area/' in url:
i_class = div_element.xpath('i/@class').extract_first().split(' ')
short = i_class[-1]
url = self.custom_settings['SITE']['url'] + '/' + short
else:
short = url[url.rfind('/') + 1:]
name = div_element.xpath('text()').extract_first()
yield ChannelItem({'short': short, 'name': name, 'url': response.urljoin(url)})
url = 'http://live.bilibili.com/area/liveList?area={}&order=online'.format(short)
room_query_list.append({'url': url, 'channel': short, 'page': 1})
for room_query in room_query_list:
yield Request('{}&page=1'.format(room_query['url']), callback=self.parse_room_list, meta=room_query)
def parse_room_list(self, response):
room_list = json.loads(response.text)['data']
if isinstance(room_list, list):
for rjson in room_list:
if isinstance(rjson['online'], int):
yield RoomItem({
'office_id': str(rjson['roomid']),
'name': rjson['title'],
'image': rjson['cover'],
'url': response.urljoin(rjson['link']),
'online': rjson['online'],
'host': rjson['uname'],
'channel': response.meta['channel']
})
if len(room_list) > 0:
next_meta = dict(response.meta, page=response.meta['page'] + 1)
yield Request('{}&page={}'.format(next_meta['url'], str(next_meta['page'])),
callback=self.parse_room_list, meta=next_meta)
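# Usage note (not part of the original spider): because this module imports its items
# with a relative import, it is meant to be run from inside its Scrapy project, e.g.
#
#   scrapy crawl bilibili -o rooms.json
#
# The output file name above is an assumption.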
#encoding=utf-8
import re
RE_SEN_SPLITER = u"(.+?(\r|\n|。|!|!|\?|?|;|;|……))"
def get_sentences(text):
if not isinstance(text, (unicode,)): return []
s = re.sub(RE_SEN_SPLITER, lambda x: x.group(0)+"\t", text)
return [x.strip() for x in s.split('\t') if x.strip() != '']
if __name__ == "__main__":
text = u'''在上市之后,京东的股价虽然也有突出的表现,但是也曾在上市第二天险些破发,不过随后,股价一路上涨,至6月12日,创出了29.6美元/股的新高,不过当日股价仍以跌幅6.31%收盘。在随后的价格交易日中,股价出现涨跌轮转的情况,截至6月30日,公司股价收于28.51美元/股,当日涨幅为1.79%。
虽然京东是迄今为止中国在美上市公司规模最大的IPO,但是华尔街正等待着今年下半年阿里巴巴集团里程碑式的股票发行交易,据信阿里巴巴的募资规模可能高达200亿美元。
迅雷“新贵上市”'''
sentences = get_sentences(text)
for sen in sentences:
print sen
print '='*10
import subprocess
test_number = 1
while True:
try:
file = open("./system_tests/test" + str(test_number) + ".txt", 'r')
except Exception:
print("OK", test_number - 1, "tests passed")
exit()
result = subprocess.run(["python3", "solution.py"], stdin=file, stdout=subprocess.PIPE)
with open("./system_tests/answer" + str(test_number) + ".txt", 'r') as ans:
if result.stdout.decode("ascii") != ans.read():
print("WA" + str(test_number))
exit()
file.close()
test_number = test_number + 1
"""
define model for gp
"""
# from threading import Thread
# from queue import Queue
from multiprocessing import Pool
from random import random, randint
from math import floor
import operator
from autoprover.gp.gene import Gene
from autoprover.gp.rule import GeneRule
from autoprover.gp.action import GeneAction
from autoprover.gp.trigger import GeneTrigger
from autoprover.gp.restriction import Restriction
# TODO: fix too many instance attributes
class GPModel:
"""
gp model
"""
#TODO fix too many args
def __init__(self, args=None, populationSize=None, maxGeneration=None,
mutateRate=None, eliteRate=None, crossRate=None,
crossType=None, verifyNum=None, proof=None, tactics=None,
limit_hyp=None, limit_goal=None):
self.population_size = populationSize or args.populationSize
self.max_generation = maxGeneration or args.maxGeneration
self.mutate_rate = mutateRate or args.mutateRate
self.elite_rate = eliteRate or args.eliteRate
self.cross_rate = crossRate or args.crossRate
self.cross_type = crossType or args.crossType
self.verify_num = verifyNum or args.verifyNum
self.limit_hyp = limit_hyp or args.limit_hyp
self.limit_goal = limit_goal or args.limit_goal
self.debug = args.debug
self.proof = proof
self.tactics = tactics
self.population = None
self.current_generation = 1
self.proofs = []
self.rules = []
self.init_population(self.population_size)
self.pre_process()
def show_prop(self):
"""
display property for model
"""
print(self.population_size)
print(self.max_generation)
print(self.mutate_rate)
print(self.elite_rate)
print(self.cross_rate)
print(self.cross_type)
print(self.verify_num)
print(self.proof)
def init_population(self, size):
"""
create population by size
"""
print("Initializing population.")
self.population = []
for _ in range(size):
self.population.append(Gene(self.tactics))
def pre_process(self):
"""
run before start
"""
self.current_generation = 1
self.update_fitness_for_population()
self.fitness_sharing()
self.sort_population()
self.update_tactic_usage()
self.check_proof()
def is_proved(self):
"""
check population has a proof
"""
return len(self.proofs) > 0
def start(self, gen=None):
"""
run the model
"""
if gen is None:
# if gen is not set
local_gen_limit = self.max_generation + 1
else:
local_gen_limit = gen
if self.current_generation > self.max_generation:
return
for _ in range(local_gen_limit):
print("Generation No.{0}".format(self.current_generation))
if self.debug:
self.sort_population()
for index in range(0, 30):
self.population[index].print_lastest()
self.crossover()
self.update_fitness_for_population()
self.apply_rules()
self.fitness_sharing()
self.next_generation()
if self.current_generation > self.max_generation:
break
# self.printGeneByIndex(0, True)
def fitness_sharing(self):
"""Use fitness sharing to re-evaluate fitness"""
for gene in self.population:
gene.fitness = gene.raw_fitness
return  # NOTE: this early return keeps raw fitness as-is; the sharing code below is currently unreachable
def dist(gene1, gene2):
"""Return distence between two gene"""
return abs(len(gene1.goal) - len(gene2.goal))
for gene in self.population:
raw_fitnesses = [e.raw_fitness for e in self.population if dist(e, gene) <= 5]
gene.fitness = sum(raw_fitnesses) / len(raw_fitnesses)
def next_generation(self):
"""
next generation
"""
print("Avg. fitness\tAvg. length")
print("{0:.8f}\t{1}".format(self.average_fitness(),
self.average_length_of_gene()))
self.current_generation += 1
self.sort_population()
self.update_tactic_usage()
self.check_proof()
def check_proof(self):
"""Check if there is a proof in population
"""
for gene in self.population:
if gene.is_proof:
print(gene.chromosome)
for state in gene.coq_states:
print(state)
self.proofs.append(Gene(chromosome=gene.valid_tactics))
def update_fitness_for_population(self):
"""
update fitness for every gene in the population
"""
def wrapper(func, *args, **kwargs):
"""func wrapper"""
return func, args, kwargs
with Pool(processes=4) as pool:
for gene in self.population:
func, args, kargs = wrapper(gene.update_fitness_for_proof,
self.proof, self.limit_hyp,
self.limit_goal)
pool.apply_async(func(*args, **kargs))  # note: func(...) is evaluated immediately, so the update actually runs in-process, not in the pool
def apply_rules(self):
"""Perform action by rules"""
if len(self.rules) == 0:
return
for gene in self.population:
for rule in self.rules:
if rule.type == "gene":
rule.check_and_apply(gene)
def crossover(self):
"""
the crossover operation for gp
"""
self.sort_population()
elite_amount = round(self.elite_rate * self.population_size)
# preserve from the top
new_population = [ele for ele in self.population if ele.ttl > 0]
for individual in new_population:
if individual.ttl > 0:
individual.ttl -= 1
new_population += self.population[:elite_amount]
while len(new_population) < self.population_size:
# newGene = self.crossBelowCrossRate()
new_gene, new_gene2 = self.cross_on_arb_seq()
if random() <= self.mutate_rate:
self.mutate_append(new_gene)
new_population.append(new_gene)
if len(new_population) == self.population_size:
break
if random() <= self.mutate_rate:
self.mutate_append(new_gene2)
new_population.append(new_gene2)
self.population = new_population
def sort_population(self):
"""
sort population by length and fitness
"""
self.population.sort(key=lambda x: x.fitness, reverse=True)
def update_tactic_usage(self):
"""update tactic statistic usage"""
usage = {e: 0 for e in self.tactics.all_tactics}
count = 0
for gene in self.population:
for tactic in gene.chromosome:
count += 1
try:
usage[tactic] += 1
except KeyError:
usage[tactic] = 1
for tactic in usage:
usage[tactic] = usage[tactic]/count
self.proof.tactics.usage = usage
def cross_below_cross_rate(self):
"""
select two parents by cross rate, crossover at a random point
"""
p1_index = randint(0, floor(self.population_size * self.cross_rate)-1)
p2_index = randint(0, floor(self.population_size * self.cross_rate)-1)
gene_of_p1 = self.population[p1_index]
gene_of_p2 = self.population[p2_index]
cross_point = randint(0, int_min(len(gene_of_p1), len(gene_of_p2))-1)
new_chromosome = []
new_chromosome += gene_of_p1.chromosome[:cross_point]
new_chromosome += gene_of_p2.chromosome[cross_point:]
if (self.tactics.is_unrepeatable(new_chromosome[cross_point])
and cross_point < len(new_chromosome)-1):
if new_chromosome[cross_point] == new_chromosome[cross_point+1]:
del new_chromosome[cross_point]
return Gene(chromosome=new_chromosome)
def cross_on_arb_seq(self, slmax=6):
"""
select two parents by cross_rate, crossover by swapping a sub-sequence
"""
p1_index = randint(0, floor(self.population_size * self.cross_rate)-1)
p2_index = randint(0, floor(self.population_size * self.cross_rate)-1)
gene_of_p1 = self.population[p1_index]
gene_of_p2 = self.population[p2_index]
p1_begin = myrandint(0, len(gene_of_p1)-1)
p1_end = p1_begin + myrandint(1, int_min(slmax, len(gene_of_p1)-p1_begin))
p2_begin = myrandint(0, len(gene_of_p2)-1)
p2_end = p2_begin + myrandint(1, int_min(slmax, len(gene_of_p2)-p2_begin))
new_chromosome = []
new_chromosome += gene_of_p1.chromosome[:p1_begin]
new_chromosome += gene_of_p2.chromosome[p2_begin:p2_end]
new_chromosome += gene_of_p1.chromosome[p1_end:]
new_chromosome2 = []
new_chromosome2 += gene_of_p2.chromosome[:p2_begin]
new_chromosome2 += gene_of_p1.chromosome[p1_begin:p1_end]
new_chromosome2 += gene_of_p2.chromosome[p2_end:]
self.remove_repeatable(new_chromosome)
self.remove_repeatable(new_chromosome2)
return Gene(chromosome=new_chromosome), Gene(chromosome=new_chromosome2)
def remove_repeatable(self, chromosome):
    """
    remove repeated unrepeatable tactics in place, keeping the first occurrence
    """
    tactic_set = set()
    kept = []
    for tactic in chromosome:
        if self.tactics.is_unrepeatable(tactic):
            if tactic in tactic_set:
                continue
            tactic_set.add(tactic)
        kept.append(tactic)
    chromosome[:] = kept
def mutate(self, gene):
"""
the mutate operation
"""
if len(gene) == 1:
gene.chromosome[0] = self.tactics.mutate_select()
else:
index = randint(len(gene)//2, len(gene)-1)
gene.chromosome[index] = self.tactics.mutate_select()
def mutate_append(self, gene):
"""append a tactic to chromosome"""
gene.chromosome.append(self.tactics.mutate_select())
def average_fitness(self):
"""Calculate the average fitness for population.
Returns:
double: avg. fitness
"""
return sum([e.fitness for e in self.population]) / len(self.population)
def average_length_of_gene(self):
"""Calculate the average fitness for population.
Returns:
double: avg. length
"""
return sum([len(e) for e in self.population]) / len(self.population)
def edit(self, index, data=None):
"""Human involved modification of some gene of the population
"""
if self.current_generation > self.max_generation:
return
print("Edit Gene {} now.".format(index))
gene = self.population[index]
gene.modification(data=data)
gene.update_fitness_for_proof(self.proof)
if gene.is_proof:
self.proofs.append(Gene(chromosome=gene.valid_tactics))
return
def show_proofs(self):
"""Show proofs found
"""
if self.proofs:
for gene in self.proofs:
print(gene.format_output(self.proof))
else:
print("There is not proof for now.")
def list(self, argv):
"""List property of some individual.
Args:
argv(list): sub command of list function.
"""
def get_interval(interval):
"""Get begin and end of interval
"""
interval_list = interval.split("-")
if len(interval_list) == 1:
return (int(interval_list[0]), int(interval_list[0])+1)
else:
return (int(interval_list[0]), int(interval_list[1])+1)
if not argv or not argv[0]:
return
(begin, end) = get_interval(argv[0])
if len(argv) == 1:
for index, gene in enumerate(self.population[begin:end]):
print("{0}: {1:.8f}".format(index, gene.fitness))
gene.print_progress()
elif argv[1] == "fitness":
for index, gene in enumerate(self.population[begin:end]):
print("{0}: {1:.8f}".format(index, gene.fitness))
elif argv[1] == "chromosome":
for index, gene in enumerate(self.population[begin:end]):
print("{0}: {1}".format(index, gene.chromosome))
elif argv[1] == "ttl":
for index, gene in enumerate(self.population[begin:end]):
print("{0}: {1}".format(index, gene.ttl))
def read_rule_from_file(self, file_name):
"""Read a rule from a JSON file"""
try:
self.rules.append(GeneRule(file_name=file_name, proof=self.proof))
except FileNotFoundError:
return
def delete_rule(self, index):
"""delete a rule from rule_list"""
del self.rules[index]
def remove_tactic(self):
"""remove a tactic in tactic_set and population"""
tactic_removed = input("Enter a tactic to be removed: ")
self.proof.tactics.remove(tactic_removed)
for gene in self.population:
gene.chromosome = [e for e in gene.chromosome if e != tactic_removed]
def set_attributes(self, argv):
"""Set attributes of population
"""
if argv[0] == "population" or argv[0] == "pop":
if argv[1] == "ttl":
self.population[int(argv[2])].ttl = int(argv[3])
def defrag(self, index_list):
"""Defrag some gene"""
for index in index_list:
self.population[index].defrag(self.proof)
def print_stats(self):
"""print tactic usage"""
sorted_stats = sorted(self.proof.tactics.usage.items(),
key=operator.itemgetter(1), reverse=True)
for tactic, usage in sorted_stats:
print("{0}: {1:.4f}%".format(tactic, usage*100))
def myrandint(begin, end):
"""
randint wrapper for the case begin == end
"""
if begin == end:
return begin
else:
return randint(begin, end)
def int_max(int_a, int_b):
"""
max(a, b)
"""
if int_a > int_b:
return int_a
else:
return int_b
def int_min(int_a, int_b):
"""
min(a, b)
"""
if int_a < int_b:
return int_a
else:
return int_b
|
StarcoderdataPython
|
11395929
|
import jaydebeapi
import sys
argv = sys.argv
multicast_address = argv[1] # default : 192.168.127.12
port_no = argv[2] # default : 41999
cluster_name = argv[3]
username = argv[4]
password = argv[5]
url = "jdbc:gs://" + multicast_address + ":" + port_no + "/" + cluster_name
conn = jaydebeapi.connect("com.toshiba.mwcloud.gs.sql.Driver", url, [username, password], "./gridstore-jdbc.jar")
curs = conn.cursor()
curs.execute("DROP TABLE IF EXISTS Emotion")
curs.execute("CREATE TABLE IF NOT EXISTS Emotion ( name text, create_date date, created_at timestamp)")
print('SQL Create Table name=Emotion')
curs.execute("CREATE TABLE IF NOT EXISTS User ( email_address text, created_at timestamp)")
print('SQL Create Table name=User')
curs.close()
conn.close()
print('success!')
|
StarcoderdataPython
|
1829830
|
import re
from typing import Any
from typing import Dict
from email_validator import EmailNotValidError
from email_validator import validate_email
from simpleeval import EvalWithCompoundTypes as Evaluator
from simpleeval import InvalidExpression
from .questions import Validator
class ValidationError(Exception):
"""
Validation error exception, for use by Form.update_object.
"""
def text_validator(validator: Validator, value: Any, form_data: Dict[str, Any]):
"""Validate length of a text value, and whether digits are allowed.
:param validator:
The validator instance for the current question.
:param value:
The value to be validated.
:param form_data:
The dictionary containing form data entered for the current form.
:Returns:
If validation passes, :data:`True`, else :data:`False`.
"""
max_length = int(validator.max_length)
min_length = int(validator.min_length)
allow_digits = validator.allow_digits
result = True
value = str(value)
length = len(value)
if length < min_length or max_length > 0 and length > max_length:
result = False
if not allow_digits:
for number in range(10):
if str(number) in value:
result = False
return result
def numeric_validator(validator: Validator, value: Any, form_data: Dict[str, Any]):
"""Validate if number value is within set limits.
:param validator:
The validator instance for the current question.
:param value:
The value to be validated.
:param form_data:
The dictionary containing form data entered for the current form.
:Returns:
If validation passes, :data:`True`, else :data:`False`.
"""
max_value = float(validator.max_value)
min_value = float(validator.min_value)
result = True
value = float(value)
if value < min_value or max_value > 0 and value > max_value:
result = False
return result
def email_validator(validator: Validator, value: Any, form_data: Dict[str, Any]):
"""Validate if value is a valid email address.
:param validator:
The validator instance for the current question.
:param value:
The value to be validated.
:param form_data:
The dictionary containing form data entered for the current form.
:Returns:
If validation passes, :data:`True`, else :data:`False`.
"""
result = True
try:
validate_email(value)
except EmailNotValidError:
result = False
return result
def regex_validator(validator: Validator, value: Any, form_data: Dict[str, Any]):
"""Validate if value matches regular expression.
:param validator:
The validator instance for the current question.
:param value:
The value to be validated.
:param form_data:
The dictionary containing form data entered for the current form.
:Returns:
If validation passes, :data:`True`, else :data:`False`.
"""
regex = validator.regex
regex = re.compile(regex)
return regex.match(value) is not None
def expression_validator(validator: Validator, value: Any, form_data: Dict[str, Any]):
"""Validate if expression associated with value is true.
:param validator:
The validator instance for the current question.
:param value:
The value to be validated.
:param form_data:
The dictionary containing form data entered for the current form.
:Returns:
If validation passes, :data:`True`, else :data:`False`.
"""
expression = validator.expression
result = True
if expression:
expression = expression.replace("{", "").replace("}", "")
expression = expression.replace(" empty", " in [[], {}]")
expression = expression.replace(" notempty", " not in [[], {}]")
expression = expression.replace(" anyof ", " in ")
try:
evaluator = Evaluator(names=form_data)
result = evaluator.eval(expression) is True
except (ValueError, TypeError, SyntaxError, KeyError, InvalidExpression):
result = False
return result
VALIDATORS = {
"text": text_validator,
"numeric": numeric_validator,
"email": email_validator,
"regex": regex_validator,
"expression": expression_validator,
}
def call_validator(validator: Validator, value: Any, form_data: Dict[str, Any]):
"""Call correct validation method depending on validator type.
:param validator:
The validator instance for the current question.
:param value:
The value to be validated.
:param form_data:
The dictionary containing form data entered for the current form.
:Returns:
If validation passes, :data:`True`, else :data:`False`.
"""
validator_method = VALIDATORS[validator.kind]
return validator_method(validator, value, form_data)
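# Rough usage sketch (illustrative only; assumes a Validator instance whose kind is
# "numeric" and which carries the min_value/max_value attributes used above):
#     is_valid = call_validator(validator, 42, {"age": 42})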
|
StarcoderdataPython
|
4836293
|
<reponame>Slovty/py-study
#!/usr/bin/python
print("你好,世界")
|
StarcoderdataPython
|
5183398
|
<filename>services/restapi/rest/apps.py
from django.apps import AppConfig
class RestkoConfig(AppConfig):
name = 'rest'
|
StarcoderdataPython
|
3505640
|
from random import randint, choice
import numpy as np
# from matplotlib import pyplot as #plt
from copy import deepcopy
from sklearn import preprocessing
import torch
from tqdm import tqdm
mima = preprocessing.MinMaxScaler()
class Entropy(object):
def normalize(self, a):
return mima.fit_transform(a)
def rotationmatrix(self, degrees):
theta = np.radians(degrees)
c, s = np.cos(theta), np.sin(theta)
return np.array(((c, -s), (s, c)))
def koch(self, order):
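# L-system style construction: each order rewrites every "F" in the rule string, then the
# walk below moves the pointer forward on "F" and rotates the heading on "L"/"R".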
sideLength = 100/(3**order)
startpos = np.array([0,0])
pointer = np.array([0, 0])
richting = np.array([1, 0])
positions = [startpos]
koch_flake = "FRFRF"
for i in range(order):
koch_flake = koch_flake.replace("F", "FLFRFLF")
for i, move in enumerate(koch_flake):
if move == "F":
pointer = pointer + richting * sideLength
elif move == "L":
richting = np.dot(self.rotationmatrix(60),richting)
positions.append(pointer)
elif move == "R":
richting = np.dot(self.rotationmatrix(-120), richting)
positions.append(pointer)
positions= self.normalize(positions)
if (self.show):
print(len(positions))
positions = np.concatenate((positions, np.array([positions[0]])), axis=0)
##plt.scatter(positions[:,0], positions[:,1])
##plt.show()
#plt.plot(positions[:,0], positions[:,1])
#plt.show()
else:
return [list([float(y) for y in x]) for x in positions]
def koch2(self, order):
sideLength = 100/(3**order)
startpos = np.array([0,0])
pointer = np.array([0, 0])
richting = np.array([1, 0])
positions = [startpos]
koch_flake = "FRFRF"
for i in range(order):
koch_flake = koch_flake.replace("F", "FLFFLLFLLFFLF")
for i, move in enumerate(koch_flake):
if move == "F":
pointer = pointer + richting * sideLength
elif move == "L":
richting = np.dot(self.rotationmatrix(60),richting)
positions.append(pointer)
elif move == "R":
richting = np.dot(self.rotationmatrix(-120), richting)
positions.append(pointer)
positions= self.normalize(positions)
if (self.show):
print (len(positions))
positions = np.concatenate((positions, np.array([positions[0]])), axis=0)
#plt.plot(positions[:,0], positions[:,1])
#plt.show()
else:
return [list([float(y) for y in x]) for x in positions]
def koch3(self, order):
sideLength = 100/(3**order)
startpos = np.array([0,0])
pointer = np.array([0, 0])
richting = np.array([1, 0])
positions = [startpos]
koch_flake = "FRFRFRF"
for i in range(order):
if(i%2 ==1):
koch_flake = koch_flake.replace("F", "FLFRFRFLF")
else:
koch_flake = koch_flake.replace("F", "FRFLFLFRF")
for i, move in enumerate(koch_flake):
if move == "F":
pointer = pointer + richting * sideLength
elif move == "L":
richting = np.dot(self.rotationmatrix(90),richting)
positions.append(pointer)
elif move == "R":
richting = np.dot(self.rotationmatrix(-90), richting)
positions.append(pointer)
positions= self.normalize(positions)
if (self.show):
print(len(positions))
positions = np.concatenate((positions, np.array([positions[0]])), axis=0)
##plt.plot(positions[:,0], positions[:,1])
##plt.show()
else:
return [list([float(y) for y in x]) for x in positions]
def __init__(self, show=False):
self.show = show
self.locations = {}
for i, func in enumerate([self.koch, self.koch2, self.koch3]):
self.locations[i] = []
for x in range(5):
self.locations[i].append(func(x))
e = Entropy(show=False)
class tsp_instance(object):
def __init__(self, order, entropydegree=0.0):
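# Sample 'order' points from the smallest pre-computed Koch curve that has enough points,
# then perturb them in shake() (Gaussian noise scaled by entropydegree plus a random rotation).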
self.order = order
self.entropy = entropydegree
self.locations = []
pickKoch = 0#randint(0,len(e.locations)-1)
# kochline = deepcopy(e.locations[pickKoch])
kochline = [[a for a in b] for b in e.locations[pickKoch]]
# print kochline
toPick = order
bestLine = 0
while (order>len(kochline[bestLine])):
bestLine += 1
pickedLine = kochline[bestLine]
while (toPick > 0):
elem = pickedLine.pop(randint(0,len(pickedLine)-1))
self.locations.append(elem)
toPick -= 1
self.locations = np.array(self.locations)
self.shake()
def normalize(self, a):
return mima.fit_transform(a)
def noisify(self, pure, amount):
noise = np.random.normal(0, 1, pure.shape)*(amount*0.15)
temp = pure+noise
temp2 = np.dot(temp, self.rotationmatrix(randint(0,359)))
return self.normalize(temp2)
def rotationmatrix(self, degrees):
theta = np.radians(degrees)
c, s = np.cos(theta), np.sin(theta)
return np.array(((c, -s), (s, c)))
def shake(self):
self.locations = self.noisify(self.locations, self.entropy)
# print (self.locations)
# ##plt.plot(self.locations[:, 0], self.locations[:, 1])
# ##plt.show()
def getTensor(self):
return torch.FloatTensor(self.locations)
class tsp_batch(object):
def __init__(self, order, entropydegree, size):
self.data = []
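# Each sample is a structured (Koch-based) instance with probability roughly (1 - entropydegree),
# otherwise a uniformly random point set of the same size.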
for _ in tqdm((a for a in range(size)), total=size):
sample = randint(0, 100)
sampledescision = ((sample/100.0) >= entropydegree)
if (sampledescision):
self.data.append(tsp_instance(order=order, entropydegree=entropydegree).getTensor())
else:
self.data.append(torch.FloatTensor(order, 2).uniform_(0, 1))
def getall(self):
return self.data
def __len__(self):
return len(self.data)
def __getitem__(self, item):
return self.data[item]
|
StarcoderdataPython
|
5185714
|
#!/usr/bin/env python
import re
# Helper functions to construct raw regular expressions "strings" (actually byte strings)
def group(content: bytes) -> bytes:
return rb"[" + content + rb"]"
def named_regex_group(name: str, content: bytes) -> bytes:
group_start = rb"(?P<" + name.encode("ascii") + rb">"
group_end = rb")"
return rb"".join((group_start, content, group_end))
def not_preceded_by(preceding: bytes, actual: bytes) -> bytes:
return rb"(?<!" + preceding + rb")" + actual
def not_followed_by(following: bytes, actual: bytes) -> bytes:
    return actual + rb"(?!" + following + rb")"
def no_capture(content: bytes) -> bytes:
return rb"(?:" + content + rb")"
# Raw regular expression "strings"" (actually byte strings)
_control_characters = rb"\\\{\}"
_newline = b"\\" + rb"r" + b"\\" + rb"n"
control_character = group(_control_characters)
not_control_character = group(rb"^" + _control_characters)
_control_characters_or_newline = _control_characters + _newline
control_character_or_newline = group(_control_characters + _newline)
not_control_character_or_newline = group(rb"^" + _control_characters_or_newline)
rtf_backslash = named_regex_group("backslash", not_preceded_by(rb"\\", rb"\\"))
unnamed_rtf_backslash = not_preceded_by(rb"\\", rb"\\")
_letters = rb"a-zA-Z"
ascii_letters = group(_letters) + rb"{1,32}"
_digits = rb"0-9"
_hdigits = rb"0-9a-f"
ignorable = named_regex_group("ignorable", rb"\\\*")
rtf_brace_open = named_regex_group("group_start", not_preceded_by(unnamed_rtf_backslash, rb"\{") + ignorable + rb"?")
rtf_brace_close = named_regex_group("group_end", not_preceded_by(unnamed_rtf_backslash, rb"\}"))
minus = named_regex_group("minus", rb"-?")
digit = named_regex_group("digit", minus + group(_digits) + rb"{1,10}")
hdigit = named_regex_group("hdigit", group(_hdigits))
# int16 = minus + digit + rb"{1,5}"
parameter_pattern = named_regex_group("parameter", digit)
space = named_regex_group("space", rb" ")
newline = named_regex_group("newline", _newline)
other = named_regex_group("other", group(rb"^" + _letters + _digits))
ascii_letter_sequence = named_regex_group("control_name", ascii_letters + parameter_pattern + rb"?")
delimiter = named_regex_group("delimiter", rb"|".join((space, newline, other, rb"$")))
symbol = named_regex_group("symbol", other)
control_word_pattern = named_regex_group("control_word", rtf_backslash + ascii_letter_sequence + delimiter)
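# A control word is a lone backslash, 1-32 ASCII letters with an optional signed numeric
# parameter, terminated by a space, newline, another symbol, or end of input.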
pcdata_delimiter = no_capture(rb"|".join((rtf_brace_open, rtf_brace_close, control_word_pattern)))
plain_text_pattern = named_regex_group("text", not_control_character_or_newline + rb"+") + no_capture(rb"|".join((control_character_or_newline, rb"$")))
probe_pattern = rb".."
class Bytes_Regex():
"""
This wraps `re.pattern` objects and gives them a method `regex101` which
prints out the pattern in such a manner that it can be copy-pasted
to regex101.com.
"""
def __init__(self, Bytes: bytes, flags:re.RegexFlag=0) -> None:
self.pattern_bytes = Bytes
self.pattern = re.compile(Bytes, flags)
self.match = self.pattern.match
def regex101(self) -> None:
print(self.pattern_bytes.decode("ascii"))
meaningful_bs = Bytes_Regex(rtf_backslash)
probe = Bytes_Regex(named_regex_group("probe", probe_pattern), flags=re.DOTALL)
parameter = Bytes_Regex(parameter_pattern)
control_word = Bytes_Regex(control_word_pattern)
control_symbol = Bytes_Regex(rtf_backslash + symbol)
group_start = Bytes_Regex(rtf_brace_open)
group_end = Bytes_Regex(rtf_brace_close)
plain_text = Bytes_Regex(plain_text_pattern)
raw_pcdata = Bytes_Regex(named_regex_group("pcdata", rb".*?") + pcdata_delimiter, flags=re.DOTALL)
raw_sdata = Bytes_Regex(named_regex_group("sdata", group(_hdigits + rb"\r\n") + rb"+"), flags=re.DOTALL)
|
StarcoderdataPython
|
74253
|
from .DistributedMinigameAI import *
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from direct.fsm import ClassicFSM
from direct.fsm import State
from direct.actor import Actor
from . import DivingGameGlobals
import random
import types
class DistributedDivingGameAI(DistributedMinigameAI):
fishProportions = []
for i in range(6):
fishProportions.append([])
n = 100
fishProportions[0]
fishProportions[0].append(([0, 0.8],
[0.8, 0.9],
[0.9, 1],
[n, n],
[n, n],
[n, n]))
fishProportions[0].append(([0, 0.8],
[0.8, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[0].append(([0, 0.7],
[0.7, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[0].append(([0, 0.7],
[0.7, 0.9],
[0.9, 1],
[n, n],
[n, n],
[n, n]))
fishProportions[0].append(([0, 0.5],
[0.5, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[0].append(([n, 0.5],
[0.5, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[1]
fishProportions[1].append(([0, 0.8],
[0.8, 0.9],
[0.9, 1],
[n, n],
[n, n],
[n, n]))
fishProportions[1].append(([0, 0.8],
[0.8, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[1].append(([0, 0.7],
[0.7, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[1].append(([0, 0.7],
[0.7, 0.9],
[n, n],
[n, n],
[n, n],
[0.9, 1]))
fishProportions[1].append(([0, 0.4],
[0.4, 0.8],
[n, n],
[n, n],
[n, n],
[0.8, 1]))
fishProportions[1].append(([n, 0.3],
[0.3, 0.6],
[n, n],
[n, n],
[n, n],
[0.6, 1]))
fishProportions[2]
fishProportions[2].append(([0, 0.7],
[0.7, 0.9],
[0.9, 1],
[n, n],
[n, n],
[n, n]))
fishProportions[2].append(([0, 0.6],
[0.6, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[2].append(([0, 0.6],
[0.6, 0.8],
[n, n],
[0.8, 1],
[n, n],
[n, n]))
fishProportions[2].append(([0, 0.5],
[0.5, 0.7],
[n, n],
[0.7, 0.9],
[n, n],
[0.9, 1]))
fishProportions[2].append(([0, 0.2],
[0.2, 0.4],
[n, n],
[0.4, 0.75],
[n, n],
[0.75, 1]))
fishProportions[2].append(([n, 0.2],
[0.2, 0.6],
[n, n],
[0.6, 0.8],
[n, n],
[0.8, 1]))
fishProportions[3]
fishProportions[3].append(([0, 0.7],
[0.7, 0.9],
[0.9, 1],
[n, n],
[n, n],
[n, n]))
fishProportions[3].append(([0, 0.6],
[0.6, 1],
[n, n],
[n, n],
[n, n],
[n, n]))
fishProportions[3].append(([0, 0.6],
[0.6, 0.8],
[n, n],
[0.95, 1],
[n, n],
[n, n]))
fishProportions[3].append(([0, 0.5],
[0.5, 0.7],
[n, n],
[0.7, 0.85],
[0.9, 0.95],
[0.95, 1]))
fishProportions[3].append(([0, 0.2],
[0.2, 0.4],
[n, n],
[0.4, 0.75],
[0.75, 0.85],
[0.85, 1]))
fishProportions[3].append(([n, 0.2],
[0.2, 0.6],
[n, n],
[0.6, 0.8],
[n, n],
[0.8, 1]))
fishProportions[4]
fishProportions[4].append(([0, 0.7],
[0.7, 0.9],
[0.9, 1],
[n, n],
[n, n],
[n, n]))
fishProportions[4].append(([0, 0.45],
[0.45, 0.9],
[n, n],
[0.9, 1],
[n, n],
[n, n]))
fishProportions[4].append(([0, 0.2],
[0.2, 0.5],
[n, n],
[0.5, 0.95],
[0.95, 1],
[n, n]))
fishProportions[4].append(([0, 0.1],
[0.1, 0.3],
[n, n],
[0.3, 0.75],
[0.75, 0.8],
[0.8, 1]))
fishProportions[4].append(([n, n],
[0, 0.15],
[n, n],
[0.15, 0.4],
[n, n],
[0.4, 1]))
fishProportions[4].append(([n, n],
[n, n],
[n, n],
[0, 0.4],
[n, n],
[0.6, 1]))
fishProportions[5]
fishProportions[5].append(([0, 0.7],
[0.7, 0.9],
[0.9, 1],
[n, n],
[n, n],
[n, n]))
fishProportions[5].append(([0, 0.45],
[0.45, 0.9],
[n, n],
[0.9, 1],
[n, n],
[n, n]))
fishProportions[5].append(([0, 0.2],
[0.2, 0.5],
[n, n],
[0.5, 0.95],
[0.95, 1],
[n, n]))
fishProportions[5].append(([0, 0.1],
[0.1, 0.3],
[n, n],
[0.3, 0.75],
[0.75, 0.8],
[0.8, 1]))
fishProportions[5].append(([n, n],
[0, 0.15],
[n, n],
[0.15, 0.4],
[n, n],
[0.4, 1]))
fishProportions[5].append(([n, n],
[n, n],
[n, n],
[0, 0.4],
[n, n],
[0.6, 1]))
difficultyPatternsAI = {ToontownGlobals.ToontownCentral: [3.5, fishProportions[0], 1.5],
ToontownGlobals.DonaldsDock: [3.0, fishProportions[1], 1.8],
ToontownGlobals.DaisyGardens: [2.5, fishProportions[2], 2.1],
ToontownGlobals.MinniesMelodyland: [2.0, fishProportions[3], 2.4],
ToontownGlobals.TheBrrrgh: [2.0, fishProportions[4], 2.7],
ToontownGlobals.DonaldsDreamland: [1.5, fishProportions[5], 3.0]}
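# The table above maps each playground to [fish spawn interval, fish proportion table, reward modifier];
# the three values are unpacked in setGameReady below.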
def __init__(self, air, minigameId):
try:
self.DistributedDivingGameAI_initialized
except:
self.DistributedDivingGameAI_initialized = 1
DistributedMinigameAI.__init__(self, air, minigameId)
self.gameFSM = ClassicFSM.ClassicFSM('DistributedDivingGameAI', [State.State('inactive', self.enterInactive, self.exitInactive, ['swimming']), State.State('swimming', self.enterSwimming, self.exitSwimming, ['cleanup']), State.State('cleanup', self.enterCleanup, self.exitCleanup, ['inactive'])], 'inactive', 'inactive')
self.addChildGameFSM(self.gameFSM)
self.__timeBase = globalClockDelta.localToNetworkTime(globalClock.getRealTime())
def delete(self):
self.notify.debug('delete')
del self.gameFSM
DistributedMinigameAI.delete(self)
def setGameReady(self):
self.notify.debug('setGameReady')
self.sendUpdate('setTrolleyZone', [self.trolleyZone])
for avId in list(self.scoreDict.keys()):
self.scoreDict[avId] = 0
self.treasureHolders = [0] * self.numPlayers
self.SPAWNTIME = self.difficultyPatternsAI[self.getSafezoneId()][0]
self.proportion = self.difficultyPatternsAI[self.getSafezoneId()][1]
self.REWARDMOD = self.difficultyPatternsAI[self.getSafezoneId()][2]
DistributedMinigameAI.setGameReady(self)
self.spawnings = []
for i in range(DivingGameGlobals.NUM_SPAWNERS):
self.spawnings.append(Sequence(Func(self.spawnFish, i), Wait(self.SPAWNTIME + random.random()), Func(self.spawnFish, i), Wait(self.SPAWNTIME - 0.5 + random.random())))
self.spawnings[i].loop()
def setGameStart(self, timestamp):
self.notify.debug('setGameStart')
DistributedMinigameAI.setGameStart(self, timestamp)
self.gameFSM.request('swimming')
self.scoreTracking = {}
for avId in list(self.scoreDict.keys()):
self.scoreTracking[avId] = [0,
0,
0,
0,
0]
def getCrabMoving(self, crabId, crabX, dir):
timestamp = globalClockDelta.getFrameNetworkTime()
rand1 = int(random.random() * 10)
rand2 = int(random.random() * 10)
self.sendUpdate('setCrabMoving', [crabId,
timestamp,
rand1,
rand2,
crabX,
dir])
def treasureRecovered(self):
if not hasattr(self, 'scoreTracking'):
return
avId = self.air.getAvatarIdFromSender()
if avId not in self.avIdList:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.treasureRecovered: invalid avId')
return
if avId not in self.treasureHolders:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.treasureRecovered: tried to recover without holding treasure')
return
self.treasureHolders[self.treasureHolders.index(avId)] = 0
timestamp = globalClockDelta.getFrameNetworkTime()
newSpot = int(random.random() * 30)
self.scoreTracking[avId][4] += 1
for someAvId in list(self.scoreDict.keys()):
if someAvId == avId:
self.scoreDict[avId] += 10 * (self.REWARDMOD * 0.25)
self.scoreDict[someAvId] += 10 * (self.REWARDMOD * 0.75 / float(len(list(self.scoreDict.keys()))))
self.sendUpdate('incrementScore', [avId, newSpot, timestamp])
def hasScoreMult(self):
return 0
def setGameAbort(self):
self.notify.debug('setGameAbort')
taskMgr.remove(self.taskName('gameTimer'))
if self.gameFSM.getCurrentState():
self.gameFSM.request('cleanup')
DistributedMinigameAI.setGameAbort(self)
def gameOver(self):
self.notify.debug('gameOver')
self.gameFSM.request('cleanup')
DistributedMinigameAI.gameOver(self)
trackingString = 'MiniGame Stats : Diving Game'
trackingString += '\nDistrict:%s' % self.getSafezoneId()
for avId in list(self.scoreTracking.keys()):
trackingString = trackingString + '\navId:%s fishHits:%s crabHits:%s treasureCatches:%s treasureDrops:%s treasureRecoveries:%s Score: %s' % (avId,
self.scoreTracking[avId][0],
self.scoreTracking[avId][1],
self.scoreTracking[avId][2],
self.scoreTracking[avId][3],
self.scoreTracking[avId][4],
self.scoreDict[avId])
self.air.writeServerEvent('MiniGame Stats', None, trackingString)
return
def enterInactive(self):
self.notify.debug('enterInactive')
def exitInactive(self):
pass
def getTimeBase(self):
return self.__timeBase
def enterSwimming(self):
self.notify.debug('enterSwimming')
duration = 65.0
taskMgr.doMethodLater(duration, self.timerExpired, self.taskName('gameTimer'))
def timerExpired(self, task):
self.notify.debug('timer expired')
for avId in list(self.scoreDict.keys()):
if self.scoreDict[avId] < 5:
self.scoreDict[avId] = 5
self.gameOver()
return Task.done
def exitSwimming(self):
for i in range(DivingGameGlobals.NUM_SPAWNERS):
self.spawnings[i].pause()
def enterCleanup(self):
self.notify.debug('enterCleanup')
for i in range(DivingGameGlobals.NUM_SPAWNERS):
self.spawnings[i].finish()
del self.spawnings
self.gameFSM.request('inactive')
def exitCleanup(self):
pass
def pickupTreasure(self, chestId):
if not hasattr(self, 'scoreTracking'):
return
timestamp = globalClockDelta.getFrameNetworkTime()
avId = self.air.getAvatarIdFromSender()
if avId not in self.avIdList:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.pickupTreasure: invalid avId')
return
if avId in self.treasureHolders:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.pickupTreasure: already holding treasure')
return
if not (0 <= chestId < len(self.treasureHolders)):
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.pickupTreasure: invalid chest requested (#%d)' % chestId)
return
if self.treasureHolders[chestId]:
# This chest is already held by someone else. Because this can happen
# during normal play (race conditions if two Toons swim into the treasure
# simultaneously) we do not log a suspicious event and silently ignore it.
return
self.scoreTracking[avId][2] += 1
self.treasureHolders[chestId] = avId
self.sendUpdate('setTreasureGrabbed', [avId, chestId])
def spawnFish(self, spawnerId):
timestamp = globalClockDelta.getFrameNetworkTime()
props = self.proportion[spawnerId]
num = random.random()
for i in range(len(props)):
prop = props[i]
low = prop[0]
high = prop[1]
if num > low and num <= high:
offset = int(10 * random.random())
self.sendUpdate('fishSpawn', [timestamp,
i,
spawnerId,
offset])
return
def handleCrabCollision(self, avId, status):
if avId not in self.avIdList:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.handleCrabCollision: invalid avId')
return
timestamp = globalClockDelta.getFrameNetworkTime()
self.sendUpdate('setTreasureDropped', [avId, timestamp])
self.scoreTracking[avId][1] += 1
if status == 'normal' or status == 'treasure':
timestamp = globalClockDelta.getFrameNetworkTime()
self.sendUpdate('performCrabCollision', [avId, timestamp])
if status == 'treasure':
if avId in self.treasureHolders:
self.treasureHolders[self.treasureHolders.index(avId)] = 0
self.scoreTracking[avId][3] += 1
else:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.handleCrabCollision: reported "treasure drop" without holding treasure')
def handleFishCollision(self, avId, spawnId, spawnerId, status):
if avId not in self.avIdList:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.handleFishCollision: invalid avId')
return
timestamp = globalClockDelta.getFrameNetworkTime()
self.sendUpdate('setTreasureDropped', [avId, timestamp])
timestamp = globalClockDelta.getFrameNetworkTime()
self.scoreTracking[avId][0] += 1
if status == 'treasure':
if avId in self.treasureHolders:
self.treasureHolders[self.treasureHolders.index(avId)] = 0
self.scoreTracking[avId][3] += 1
else:
self.air.writeServerEvent('suspicious', avId, 'DivingGameAI.handleFishCollision: reported "treasure drop" without holding treasure')
self.sendUpdate('performFishCollision', [avId,
spawnId,
spawnerId,
timestamp])
|
StarcoderdataPython
|
11302214
|
<filename>python/pacific_atlantic_water_flow.py
'''
Given an m x n matrix of non-negative integers representing the height of each unit cell in a continent, the "Pacific ocean" touches the left and top edges of the matrix and the "Atlantic ocean" touches the right and bottom edges.
Water can only flow in four directions (up, down, left, or right) from a cell to another one with height equal or lower.
Find the list of grid coordinates where water can flow to both the Pacific and Atlantic ocean.
Note:
The order of returned grid coordinates does not matter.
Both m and n are less than 150.
Example:
Given the following 5x5 matrix:
Pacific ~ ~ ~ ~ ~
~ 1 2 2 3 (5) *
~ 3 2 3 (4) (4) *
~ 2 4 (5) 3 1 *
~ (6) (7) 1 4 5 *
~ (5) 1 1 2 4 *
* * * * * Atlantic
Return:
[[0, 4], [1, 3], [1, 4], [2, 2], [3, 0], [3, 1], [4, 0]] (positions with parentheses in above matrix).
'''
class Solution(object):
def pacificAtlantic(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: List[List[int]]
"""
output = list()
if not matrix:
return output
self.directions = [ (0,1), (0,-1), (1,0), (-1,0) ]
row = len(matrix)
col = len(matrix[0])
p_visited = [ [False] * col for _ in range(row) ]
a_visited = [ [False] * col for _ in range(row) ]
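# Run DFS "inland" from each ocean's border cells (walking uphill, since water flows
# downhill); a cell reachable from both borders can drain into both oceans.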
for r in range(row):
self.dfs(matrix, r, 0, p_visited, row, col)
self.dfs(matrix, r, col-1, a_visited, row, col)
for c in range(col):
self.dfs(matrix, 0, c, p_visited, row, col)
self.dfs(matrix, row-1, c, a_visited, row, col)
for r in range(row):
for c in range(col):
if p_visited[r][c] and a_visited[r][c]:
output.append( (r, c) )
return output
def dfs(self, matrix, r, c, visited, row, col):
visited[r][c] = True
for dir in self.directions:
next_r, next_c = r + dir[0], c + dir[1]
if next_r < 0 or next_r >= row or \
next_c < 0 or next_c >= col or \
visited[next_r][next_c] or matrix[next_r][next_c] < matrix[r][c]:
continue
self.dfs(matrix, next_r, next_c, visited, row, col)
solution = Solution()
matrix = [[1,2,2,3,5], [3,2,3,4,4], [2,4,5,3,1], [6,7,1,4,5], [5,1,1,2,4]]
matrix = [[1,1], [1,1], [1,1]]  # overrides the 5x5 example above; comment out to test it instead
print(solution.pacificAtlantic(matrix))
|
StarcoderdataPython
|
8067396
|
<filename>fastinference/inference/__init__.py
from .inference import *
from .text import *
|
StarcoderdataPython
|
3326902
|
import enum
import json
import importlib
import subprocess
import shutil
from pathlib import Path
import os
from typing import Tuple, Iterable
import antlr4 # type: ignore
from . import util
class FormatType(str, enum.Enum):
s_expr = "s-expr"
json = "json"
def format_token(self, token: antlr4.Token) -> str:
lexer = token.getTokenSource()
token_type_map = {
symbolic_id + len(lexer.literalNames) - 1: symbolic_name
for symbolic_id, symbolic_name in enumerate(lexer.symbolicNames)
}
type_ = token_type_map.get(token.type, "literal")
if self == FormatType.s_expr:
return f"({type_} {json.dumps(token.text)})"
elif self == FormatType.json:
return json.dumps(
dict(
type=type_,
text=token.text,
line=token.line,
column=token.column,
)
)
else:
raise NotImplementedError("Format type {self!s} is not implemented")
def sibling_sep(self) -> str:
if self == FormatType.s_expr:
return " "
elif self == FormatType.json:
return ","
else:
raise NotImplementedError()
def enter_rule(self, ctx: antlr4.ParserRuleContext) -> str:
rule_name = ctx.parser.ruleNames[ctx.getRuleIndex()]
if self == FormatType.s_expr:
return f"({rule_name}"
elif self == FormatType.json:
return f'{{"rule": {json.dumps(rule_name)}, "children": ['
else:
raise NotImplementedError()
def exit_rule(self) -> str:
if self == FormatType.s_expr:
return f")"
elif self == FormatType.json:
return f"]}}"
else:
raise NotImplementedError()
def get_cache_path() -> Path:
path = (
Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))
/ "antlr4-python-grun"
)
path.mkdir(parents=True, exist_ok=True)
return path
def get_antlr_jar() -> Path:
antlr_jar_path = get_cache_path() / "antlr-4.8-complete.jar"
if not antlr_jar_path.exists():
antlr_url = "https://www.antlr.org/download/antlr-4.8-complete.jar"
util.download(antlr_url, antlr_jar_path)
return antlr_jar_path
def compile(grammar_path: Path) -> Path:
name = grammar_path.stem
build_dir = get_cache_path() / name
antlr_jar = get_antlr_jar()
representative_file = build_dir / f"{name}Lexer.py"
if (
not representative_file.exists()
or representative_file.stat().st_mtime < grammar_path.stat().st_mtime
):
if build_dir.exists():
shutil.rmtree(build_dir)
build_dir.mkdir()
subprocess.run(
[
"java",
"-jar",
str(antlr_jar),
"-o",
str(build_dir),
"-Dlanguage=Python3",
"-no-listener",
str(grammar_path.name),
],
check=True,
cwd=grammar_path.parent,
)
return build_dir
def get_lexer_parser(grammar_path: Path) -> Tuple[antlr4.Lexer, antlr4.Parser]:
build_dir = compile(grammar_path)
name = grammar_path.stem
with util.sys_path_prepend([build_dir]):
return (
getattr(
importlib.import_module(f"{name}Lexer", package=None),
f"{name}Lexer",
),
getattr(
importlib.import_module(f"{name}Parser", package=None),
f"{name}Parser",
),
)
def tokenize(
grammar: Path,
input: Path,
) -> Iterable[str]:
Lexer, _ = get_lexer_parser(grammar)
input_stream = antlr4.FileStream(input)
lexer = Lexer(input_stream)
while True:
token = lexer.nextToken()
yield token
if token.type == token.EOF:
break
def parse(
grammar: Path,
initial_rule: str,
input: Path,
) -> antlr4.ParserRuleContext:
Lexer, Parser = get_lexer_parser(grammar)
input_stream = antlr4.FileStream(input)
lexer = Lexer(input_stream)
stream = antlr4.CommonTokenStream(lexer)
parser = Parser(stream)
return getattr(parser, initial_rule)()
def format_tree(
root: antlr4.ParserRuleContext,
pretty: bool,
format: FormatType,
) -> str:
stack = [(root, True)]
depth = 0
buf = []
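# Iterative traversal: after entering a rule, an "end" sentinel is pushed beneath its
# children on the stack so exit_rule() is emitted once every child has been formatted.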
while stack:
node, last = stack.pop()
if node == "end":
depth -= 1
buf.append(depth * " " if pretty else "")
buf.append(format.exit_rule())
if not last:
buf.append(format.sibling_sep())
elif isinstance(node, antlr4.tree.Tree.TerminalNodeImpl):
buf.append(depth * " " if pretty else "")
buf.append(format.format_token(node.symbol))
if not last:
buf.append(format.sibling_sep())
else:
buf.append(depth * " " if pretty else "")
buf.append(format.enter_rule(node))
depth += 1
children = [] if node.children is None else node.children
stack.append(("end", last))
stack.extend(util.first_sentinel(list(children)[::-1]))
if pretty:
buf.append("\n")
return "".join(buf)
|
StarcoderdataPython
|
5064419
|
<reponame>drcsturm/project-euler
# Euler discovered the remarkable quadratic formula:
#     n^2 + n + 41
# It turns out that the formula will produce 40 primes for the consecutive integer values 0 <= n <= 39.
# However, when n = 40, 40^2 + 40 + 41 = 40(40 + 1) + 41 is divisible by 41, and certainly when
# n = 41, 41^2 + 41 + 41 is clearly divisible by 41.
# The incredible formula n^2 - 79n + 1601 was discovered, which produces 80 primes for the
# consecutive values 0 <= n <= 79. The product of the coefficients, -79 and 1601, is -126479.
# Considering quadratics of the form:
#     n^2 + a*n + b, where |a| < 1000 and |b| <= 1000
# (|n| is the modulus/absolute value of n, e.g. |11| = 11 and |-4| = 4),
# find the product of the coefficients, a and b, for the quadratic expression that produces the
# maximum number of primes for consecutive values of n, starting with n = 0.
from utils.primes import is_prime
rangea = 1000
rangeb = 1001
max_n = 0
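# Brute force: for every (a, b) pair count how many consecutive n starting at 0 yield primes,
# keeping track of the pair with the longest streak.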
for a in range(-rangea, rangea):
for b in range(-rangeb, rangeb):
n = 0
while True:
num = n**2 + a * n + b
if not is_prime(num):
if n >= max_n:
max_n = n
max_a = a
max_b = b
# print(max_a, max_b, max_n)
break
n += 1
# print(max_a, max_b, max_n)
print(max_a * max_b)
|
StarcoderdataPython
|
8162118
|
<filename>continuum/data_utils.py
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import torch
import os
from torchvision import datasets, transforms
import numpy as np
import imageio
from .datasets.LSUN import load_LSUN
from .datasets.cifar10 import load_Cifar10
from .datasets.cifar100 import load_Cifar100
from .datasets.core50 import load_core50
from .datasets.fashion import Fashion
from .datasets.kmnist import Kmnist
def get_images_format(dataset):
if dataset == 'MNIST' or dataset == 'fashion' or dataset == 'mnishion' or "mnist" in dataset:
imageSize = 28
img_channels = 1
elif dataset == 'cifar10' or dataset == 'cifar100':
imageSize = 32
img_channels = 3
elif dataset == 'core10' or dataset == 'core50':
# if args.imageSize is at default value we change it to 128
imageSize = 128
img_channels = 3
else:
raise Exception("[!] There is no option for " + dataset)
return imageSize, img_channels
def check_args(args):
if "mnist_fellowship" in args.task:
args.dataset = "mnist_fellowship"
if 'merge' in args.task:
args.dataset = "mnist_fellowship_merge"
return args
def check_and_Download_data(folder, dataset, scenario):
# download data if possible
if dataset == 'MNIST' or dataset == 'mnishion' or "mnist_fellowship" in scenario:
datasets.MNIST(folder, train=True, download=True, transform=transforms.ToTensor())
if dataset == 'fashion' or dataset == 'mnishion' or "mnist_fellowship" in scenario:
Fashion(os.path.join(folder, "fashion"), train=True, download=True, transform=transforms.ToTensor())
# download data if possible
if dataset == 'kmnist' or "mnist_fellowship" in scenario:
Kmnist(os.path.join(folder, "kmnist"), train=True, download=True, transform=transforms.ToTensor())
if dataset == 'core50' or dataset == 'core10':
if not os.path.isdir(folder):
print('This dataset should be downloaded manually')
def load_data(dataset, path2data, train=True):
if dataset == 'cifar10':
path2data = os.path.join(path2data, dataset, "processed")
x_, y_ = load_Cifar10(path2data, train)
x_ = x_.float()
elif dataset == 'cifar100':
path2data = os.path.join(path2data, dataset, "processed")
x_, y_ = load_Cifar100(path2data, train)
x_ = x_.float()
elif dataset == 'LSUN':
x_, y_ = load_LSUN(path2data, train)
x_ = x_.float()
elif dataset == 'core50' or dataset == 'core10':
x_, y_ = load_core50(dataset, path2data, train)
elif 'mnist_fellowship' in dataset:
# In this case data will be loaded later dataset by dataset
return None, None
else:
if train:
data_file = os.path.join(path2data, dataset, "processed", 'training.pt')
else:
data_file = os.path.join(path2data, dataset, "processed", 'test.pt')
if not os.path.isfile(data_file):
raise AssertionError("Missing file: {}".format(data_file))
x_, y_ = torch.load(data_file)
x_ = x_.float() / 255.0
y_ = y_.view(-1).long()
return x_, y_
def visualize_batch(batch, number, shape, path):
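# Arrange a batch of images into a near-square grid and write it to path; single-channel
# and three-channel batches take different code paths below.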
batch = batch.cpu().data
image_frame_dim = int(np.floor(np.sqrt(number)))
if shape[2] == 1:
data_np = batch.numpy().reshape(number, shape[0], shape[1], shape[2])
save_images(data_np[:image_frame_dim * image_frame_dim, :, :, :], [image_frame_dim, image_frame_dim],
path)
elif shape[2] == 3:
data = batch.numpy().reshape(number, shape[2], shape[1], shape[0])
make_samples_batche(data[:number], number, path)
else:
save_images(batch[:image_frame_dim * image_frame_dim, :, :, :], [image_frame_dim, image_frame_dim],
path)
def save_images(images, size, image_path):
return imsave(images, size, image_path)
def imsave(images, size, path):
image = np.squeeze(merge(images, size))
image -= np.min(image)
image /= np.max(image) + 1e-12
image = 255 * image # Now scale by 255
image = image.astype(np.uint8)
return imageio.imwrite(path, image)
def merge(images, size):
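# Tile the images (shaped N x H x W x C) into a size[0] x size[1] grid; 1-, 3- and 4-channel
# inputs are supported.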
h, w = images.shape[1], images.shape[2]
if (images.shape[3] in (3, 4)):
c = images.shape[3]
img = np.zeros((h * size[0], w * size[1], c))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j * h:j * h + h, i * w:i * w + w, :] = image
return img
elif images.shape[3] == 1:
img = np.zeros((h * size[0], w * size[1]))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j * h:j * h + h, i * w:i * w + w] = image[:, :, 0]
return img
else:
raise ValueError('in merge(images,size) images parameter ''must have dimensions: HxW or HxWx3 or HxWx4')
def img_stretch(img):
img = img.astype(float)
img -= np.min(img)
img /= np.max(img) + 1e-12
return img
def make_samples_batche(prediction, batch_size, filename_dest):
plt.figure()
batch_size_sqrt = int(np.sqrt(batch_size))
input_channel = prediction[0].shape[0]
input_dim = prediction[0].shape[1]
prediction = np.clip(prediction, 0, 1)
pred = np.rollaxis(prediction.reshape((batch_size_sqrt, batch_size_sqrt, input_channel, input_dim, input_dim)), 2,
5)
pred = pred.swapaxes(2, 1)
pred = pred.reshape((batch_size_sqrt * input_dim, batch_size_sqrt * input_dim, input_channel))
fig, ax = plt.subplots(figsize=(batch_size_sqrt, batch_size_sqrt))
ax.axis('off')
ax.imshow(img_stretch(pred), interpolation='nearest')
ax.grid()
ax.set_xticks([])
ax.set_yticks([])
fig.savefig(filename_dest, bbox_inches='tight', pad_inches=0)
plt.close(fig)
plt.close()
|
StarcoderdataPython
|
3397136
|
# imports
from tkinter import *
from tkinter import messagebox
import os
from time import strftime
import time
import pickle
import random
import os
import sys
import fpdf
from fpdf import FPDF
global continuo
continuo=0
def VentanaPrincipal():
# main window configuration
Ventana_C=Tk()
Ventana_C.geometry('800x750+450+50')
Ventana_C.title('Futoshiki')
Ventana_C.config(bg='beige')
Ventana_C.resizable(width= False, height=False)
# titles
Mensaje_titulo=Message(Ventana_C,text="FUTOSHIKI",width='300',font=("Comic Sans",20),bg="#C2D8FB",fg="black")
Mensaje_titulo.place(x=300,y=50)
Mensaje_nombre=Message(Ventana_C,text="Ingrese su nombre:",width='300',font=("Arial",15),bg='beige',fg="black")
Mensaje_nombre.place(x=50,y=100)
Mensaje_reloj=Message(Ventana_C,text="Opciones de reloj:",width='320',font=("Arial",16),bg='beige',fg="black")
Mensaje_reloj.place(x=15,y=605)
Mensaje_panel=Message(Ventana_C,text="Orientacion del panel de digitos:",width='320',font=("Arial",16),bg='beige',fg="black")
Mensaje_panel.place(x=465,y=605)
Mensaje_dificultad=Message(Ventana_C,text="Dicultad:",width='300',font=("Arial",20),bg='beige',fg="black")
Mensaje_dificultad.place(x=300,y=150)
# name entry field
Nombre_text=StringVar()
Nombre_widget=Entry(Ventana_C,width='50',textvariable=Nombre_text)
Nombre_widget.place(x=250,y=110)
# limit what can be entered (max 20 characters)
def max_name(Nombre_text):
if len(Nombre_text.get()) > 0:
Nombre_text.set(Nombre_text.get()[:20])
Nombre_text.trace("w", lambda *args: max_name(Nombre_text))
# clock / timer option
var=IntVar()
RadioButton1=Radiobutton(Ventana_C, text='Reloj',variable=var,value=1)
RadioButton1.place(x=20,y=640)
RadioButton2=Radiobutton(Ventana_C, text='Temporizador',variable=var,value=2)
RadioButton2.place(x=20,y=660)
RadioButton3=Radiobutton(Ventana_C, text='NO',variable=var,value=3)
RadioButton3.place(x=20,y=680)
# which side the digit panel goes on
var2=IntVar()
RadioButton4=Radiobutton(Ventana_C, text='Izquierda',variable=var2,value=1)
RadioButton4.place(x=700,y=640)
RadioButton5=Radiobutton(Ventana_C, text='Derecha',variable=var2,value=2)
RadioButton5.place(x=700,y=660)
# difficulty
var3=IntVar()
RadioButton6=Radiobutton(Ventana_C, text='Facil',variable=var3,value=1)
RadioButton6.place(x=310,y=190)
RadioButton7=Radiobutton(Ventana_C, text='Intermedio',variable=var3,value=2)
RadioButton7.place(x=310,y=210)
RadioButton8=Radiobutton(Ventana_C, text='Dificil',variable=var3,value=3)
RadioButton8.place(x=310,y=230)
# multilevel toggle function
global continuo
continuo=0
def multinivel():
global continuo
if continuo==1:
continuo=0
Botonnivel["bg"]="grey"
Botonnivel["text"]="Multinivel: Apagado"
else:
continuo=1
Botonnivel["bg"]="green"
Botonnivel["text"]="Multinivel: Encendido"
# multilevel button
Botonnivel=Button(Ventana_C,text="Multinivel: Apagado",width='17',height='2',font=("Arial",15),bg='grey',fg="black",command=multinivel)
Botonnivel.place(x=280,y=660)
# games file: seed futoshiki2020partidas.dat with the built-in boards if it is empty
filesize=os.path.getsize("futoshiki2020partidas.dat")
if filesize==0:
listafacil=[(("2",0,0),("3",2,2)),(("3",3,3),("4",0,3),("1",1,1)),(("1",1,3),("2",0,2))]
listaintermedio=[((("v",0,1),(">",0,2),(">",0,3),(">",1,1),(">",2,3),("˄",2,3),("<",3,3),("˄",3,4)),(("v",0,0),("v",0,1),(">",1,1),(">",2,0),("v",2,1),("v",3,0),("v",3,1),(">",4,3)),((">",0,0),("<",0,2),("˄",0,2),("v",0,4),("v",1,3),("˄",1,4),("v",3,2),("<",3,3),("˄",3,4),(">",4,3)))]
listadificil=[((("4",0,0),("˄",0,1),("2",1,1),("<",1,1),("v",2,0),("<",2,0),("v",2,2),(">",2,3),("1",3,0),("v",3,1),(">",3,2),("<",4,0)),(("<",0,0),("2",0,2),("˄",1,0),(">",1,2),("4",1,3),("v",2,1),("v",2,2),("1",2,3),(">",3,2),("<",4,3)),(("<",0,0),("<",0,1),(">",0,2),("4",1,1),("˄",1,1),(">",1,1),("˄",1,3),("v",2,1),("1",2,4),("<",3,2),("˄",3,2),("<",3,3),(">",4,0)))]
listadefinitiva=[listafacil,listaintermedio,listadificil]
a=open("futoshiki2020partidas.dat","wb")
pickle.dump(listadefinitiva,a)
a.close()
# start a new game
def jugar(Nombre,reloj,lado,dificultad,continuo):
if Nombre=="" or reloj==0 or lado==0 or dificultad==0:#control de errores
messagebox.showerror(message="Error, entradas incompletas")
else:
Ventana_C.withdraw()  # hide the main window; the game window is opened next
Ventana_J=Tk()  # game window configuration
Ventana_J.geometry('800x750+450+50')
Ventana_J.title('Futoshiki')
Ventana_J.config(bg='beige')
Ventana_J.resizable(width= False, height=False)
Mensaje_nombre=Label(Ventana_J,text="Nombre: "+Nombre,font=("Arial",20),bg='beige')
Mensaje_nombre.place(x=275,y=20)  # title
# globals so the selected button callbacks can act on shared game state
global uno
global dos
global tres
global cuatro
global cinco
global num
global lista
global btn0
global btn1
global btn2
global btn3
global btn4
global btn5
global btn6
global btn7
global btn8
global btn9
global btn10
global btn11
global btn12
global btn13
global btn14
global btn15
global btn16
global btn17
global btn18
global btn19
global btn20
global btn21
global btn22
global btn23
global btn24
global cuadricula
global algo
global puntaje
global top10
global listaborrada
global nivelmult
global dificultadC
global sugerido
global relojC
relojC=reloj
sugerido=0
dificultadC=dificultad
uno="1"
dos="2"
tres="3"
cuatro="4"
cinco="5"
num=""
algo=0
puntaje=0
filesize=os.path.getsize("futoshiki2020top10.dat")
if filesize==0:
top10=[]
y=open("futoshiki2020top10.dat","wb")
pickle.dump(top10,y)
y.close()
else:
y=open("futoshiki2020top10.dat","rb")
top10=pickle.load(y)
y.close()
listaborrada=[]
if isinstance(dificultad,list):
lista=dificultad[2]
cuadricula=dificultad[1]
nivelmult=dificultad[3]
btn0=cuadricula[0][0]
btn1=cuadricula[0][1]
btn2=cuadricula[0][2]
btn3=cuadricula[0][3]
btn4=cuadricula[0][4]
btn5=cuadricula[1][0]
btn6=cuadricula[1][1]
btn7=cuadricula[1][2]
btn8=cuadricula[1][3]
btn9=cuadricula[1][4]
btn10=cuadricula[2][0]
btn11=cuadricula[2][1]
btn12=cuadricula[2][2]
btn13=cuadricula[2][3]
btn14=cuadricula[2][4]
btn15=cuadricula[3][0]
btn16=cuadricula[3][1]
btn17=cuadricula[3][2]
btn18=cuadricula[3][3]
btn19=cuadricula[3][4]
btn20=cuadricula[4][0]
btn21=cuadricula[4][1]
btn22=cuadricula[4][2]
btn23=cuadricula[4][3]
btn24=cuadricula[4][4]
algo=1
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
else:
btn0=0
btn1=0
btn2=0
btn3=0
btn4=0
btn5=0
btn6=0
btn7=0
btn8=0
btn9=0
btn10=0
btn11=0
btn12=0
btn13=0
btn14=0
btn15=0
btn16=0
btn17=0
btn18=0
btn19=0
btn20=0
btn21=0
btn22=0
btn23=0
btn24=0
lista=[]
nivelmult=0
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
# functions for undoing moves
def borra_todo():
global sugerido
if sugerido==1:
sugerencia()
while lista!=[]:
anterior()
def anterior():
global listaborrada
global lista
global num
global btn0
global btn1
global btn2
global btn3
global btn4
global btn5
global btn6
global btn7
global btn8
global btn9
global btn10
global btn11
global btn12
global btn13
global btn14
global btn15
global btn16
global btn17
global btn18
global btn19
global btn20
global btn21
global btn22
global btn23
global btn24
global cuadricula
global sugerido
if sugerido==1:
sugerencia()
if lista==[]:
messagebox.showerror(message="Error, ya no hay jugadas anteriores a esta")
elif lista[-1]==0:
listaborrada.append((lista[-1],btn0))
btn0=0
lista=lista[:-1]
num=""
boton0["text"]=num
boton0["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==1:
listaborrada.append((lista[-1],btn1))
btn1=0
lista=lista[:-1]
num=""
boton1["text"]=num
boton1["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==2:
listaborrada.append((lista[-1],btn2))
btn2=0
lista=lista[:-1]
num=""
boton2["text"]=num
boton2["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==3:
listaborrada.append((lista[-1],btn3))
btn3=0
lista=lista[:-1]
num=""
boton3["text"]=num
boton3["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==4:
listaborrada.append((lista[-1],btn4))
btn4=0
lista=lista[:-1]
num=""
boton4["text"]=num
boton4["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==5:
listaborrada.append((lista[-1],btn5))
btn5=0
lista=lista[:-1]
num=""
boton5["text"]=num
boton5["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==6:
listaborrada.append((lista[-1],btn6))
btn6=0
lista=lista[:-1]
num=""
boton6["text"]=num
boton6["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==7:
listaborrada.append((lista[-1],btn7))
btn7=0
lista=lista[:-1]
num=""
boton7["text"]=num
boton7["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==8:
listaborrada.append((lista[-1],btn8))
btn8=0
lista=lista[:-1]
num=""
boton8["text"]=num
boton8["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==9:
listaborrada.append((lista[-1],btn9))
btn9=0
lista=lista[:-1]
num=""
boton9["text"]=num
boton9["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==10:
listaborrada.append((lista[-1],btn10))
btn10=0
lista=lista[:-1]
num=""
boton10["text"]=num
boton10["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==11:
listaborrada.append((lista[-1],btn11))
btn11=0
lista=lista[:-1]
num=""
boton11["text"]=num
boton11["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==12:
listaborrada.append((lista[-1],btn12))
btn12=0
lista=lista[:-1]
num=""
boton12["text"]=num
boton12["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==13:
listaborrada.append((lista[-1],btn13))
btn13=0
lista=lista[:-1]
num=""
boton13["text"]=num
boton13["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==14:
listaborrada.append((lista[-1],btn14))
btn14=0
lista=lista[:-1]
num=""
boton14["text"]=num
boton14["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==15:
listaborrada.append((lista[-1],btn15))
btn15=0
lista=lista[:-1]
num=""
boton15["text"]=num
boton15["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==16:
listaborrada.append((lista[-1],btn16))
btn16=0
lista=lista[:-1]
num=""
boton16["text"]=num
boton16["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==17:
listaborrada.append((lista[-1],btn17))
btn17=0
lista=lista[:-1]
num=""
boton17["text"]=num
boton17["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==18:
listaborrada.append((lista[-1],btn18))
btn18=0
lista=lista[:-1]
num=""
boton18["text"]=num
boton18["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==19:
listaborrada.append((lista[-1],btn19))
btn19=0
lista=lista[:-1]
num=""
boton19["text"]=num
boton19["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==20:
listaborrada.append((lista[-1],btn20))
btn20=0
lista=lista[:-1]
num=""
boton20["text"]=num
boton20["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==21:
listaborrada.append((lista[-1],btn21))
btn21=0
lista=lista[:-1]
num=""
boton21["text"]=num
boton21["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==22:
listaborrada.append((lista[-1],btn22))
btn22=0
lista=lista[:-1]
num=""
boton22["text"]=num
boton22["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==23:
listaborrada.append((lista[-1],btn23))
btn23=0
lista=lista[:-1]
num=""
boton23["text"]=num
boton23["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif lista[-1]==24:
listaborrada.append((lista[-1],btn24))
btn24=0
lista=lista[:-1]
num=""
boton24["text"]=num
boton24["state"]='normal'
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
    # checks whether the game has been won
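    # counts the empty cells; once none remain it stops the count-up clock, disables the
    # controls and shows the win message with a Fin/siguiente button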
def ganar(cuadricula):
global continuo
global dificultadC
if isinstance(dificultadC,list):
dificultadC=dificultadC[0]
ceros=0
for i in cuadricula:
for j in i:
if j==0 or j=="0":
ceros+=1
if ceros==0:
if reloj==1:
parar()
Boton4_J["state"]=DISABLED
Boton5_J["state"]=DISABLED
Boton6_J["state"]=DISABLED
Boton0_num["state"]=DISABLED
Boton1_num["state"]=DISABLED
Boton2_num["state"]=DISABLED
Boton3_num["state"]=DISABLED
Boton4_num["state"]=DISABLED
if continuo==0:
Mensaje_ganar=Message(Ventana_J,text="¡EXCELENTE! JUEGO TERMINADO CON ÉXITO",width='885',font=("Comic Sans",40),bg="#C2D8FB",fg="black")
Mensaje_ganar.place(x=10,y=475)
elif continuo==1 and dificultadC <3:
Mensaje_ganar=Message(Ventana_J,text="¡EXCELENTE! NIVEL TERMINADO CON ÉXITO",width='885',font=("Comic Sans",40),bg="#C2D8FB",fg="black")
Mensaje_ganar.place(x=10,y=475)
else:
continuo=0
Mensaje_ganar=Message(Ventana_J,text="¡EXCELENTE! JUEGO TERMINADO CON ÉXITO",width='885',font=("Comic Sans",40),bg="#C2D8FB",fg="black")
Mensaje_ganar.place(x=10,y=475)
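            # cerrar(): appends (Nombre, puntaje) to the top-10 file, then either opens the next
            # level (continuous mode) or closes the game windows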
def cerrar():
global continuo
global dificultadC
filesize=os.path.getsize("futoshiki2020top10.dat")
if filesize!=0:
file = open("futoshiki2020top10.dat","r+")
file.truncate(0)
file.close()
top10.append((Nombre,puntaje))
y=open("futoshiki2020top10.dat","wb")
pickle.dump(top10,y)
y.close()
if continuo==1:
Ventana_J.destroy()
jugar(Nombre,reloj,lado,dificultadC+1,continuo)
else:
Ventana_J.destroy()
Ventana_C.destroy()
if continuo==0:
btfin=Button(Ventana_J,text="Fin",width='11',height='3',command=cerrar)
else:
btfin=Button(Ventana_J,text="siguiente",width='11',height='3',command=cerrar)
btfin.place(x=300,y=200)
    # handlers for the grid buttons
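    # numero(): remembers the digit chosen on the selector buttons and highlights it in green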
def numero(n):
global num
num=n
if num=="1":
Boton0_num["bg"]="green"
Boton1_num["bg"]="white"
Boton2_num["bg"]="white"
Boton3_num["bg"]="white"
Boton4_num["bg"]="white"
elif num=="2":
Boton0_num["bg"]="white"
Boton1_num["bg"]="green"
Boton2_num["bg"]="white"
Boton3_num["bg"]="white"
Boton4_num["bg"]="white"
elif num=="3":
Boton0_num["bg"]="white"
Boton1_num["bg"]="white"
Boton2_num["bg"]="green"
Boton3_num["bg"]="white"
Boton4_num["bg"]="white"
elif num=="4":
Boton0_num["bg"]="white"
Boton1_num["bg"]="white"
Boton2_num["bg"]="white"
Boton3_num["bg"]="green"
Boton4_num["bg"]="white"
elif num=="5":
Boton0_num["bg"]="white"
Boton1_num["bg"]="white"
Boton2_num["bg"]="white"
Boton3_num["bg"]="white"
Boton4_num["bg"]="green"
    # TODO: add a colour change when the cell is pressed
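    # original0()..original24(): one handler per cell of the 5x5 board, all with the same shape.
    # With suggestion mode off, the selected number is checked against the cell's column, its row
    # and the neighbouring inequality signs from the template; if valid, the cell is filled and
    # locked and ganar() is called. With suggestion mode on, the digits still free for that
    # row/column are shown instead.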
def original0():
global num
global btn0
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[0]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[0]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==0 and i[2]==0:
if btn1!=0 and btn5!=0:
if i[0]=="<":
if num>=btn1:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn1:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn5:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn5:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn0=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton0["text"]=num
boton0["state"]=DISABLED
lista.append(0)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original1():
global num
global btn1
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[1]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[0]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==0 and i[2]==1:
if btn2!=0 and btn6!=0:
if i[0]=="<":
if num>=btn2:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn2:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn6:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn6:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn1=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton1["text"]=num
boton1["state"]=DISABLED
lista.append(1)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original2():
global num
global btn2
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[2]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[0]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==0 and i[2]==2:
if btn3!=0 and btn7!=0:
if i[0]=="<":
if num>=btn3:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn3:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn7:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn7:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn2=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton2["text"]=num
boton2["state"]=DISABLED
lista.append(2)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original3():
global num
global btn3
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[3]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[0]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==0 and i[2]==3:
if btn4!=0 and btn8!=0:
if i[0]=="<":
if num>=btn4:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn4:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn8:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn8:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn3=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton3["text"]=num
boton3["state"]=DISABLED
lista.append(3)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original4():
global num
global btn4
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[4]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[0]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==0 and i[2]==4:
if btn9!=0:
if i[0]=="v":
if num<=btn9:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn9:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn4=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton4["text"]=num
boton4["state"]=DISABLED
lista.append(4)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original5():
global num
global btn5
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[0]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[1]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==1 and i[2]==0:
if btn6!=0 and btn10!=0:
if i[0]=="<":
if num>=btn6:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn6:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn10:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn10:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn5=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton5["text"]=num
boton5["state"]=DISABLED
lista.append(5)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original6():
global num
global btn6
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[1]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[1]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==1 and i[2]==1:
if btn7!=0 and btn11!=0:
if i[0]=="<":
if num>=btn7:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn7:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn11:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn11:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn6=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton6["text"]=num
boton6["state"]=DISABLED
lista.append(6)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original7():
global num
global btn7
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[2]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[1]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==1 and i[2]==2:
if btn8!=0 and btn12!=0:
if i[0]=="<":
if num>=btn8:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn8:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn12:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn12:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn7=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton7["text"]=num
boton7["state"]=DISABLED
lista.append(7)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original8():
global num
global btn8
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[3]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[1]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==1 and i[2]==3:
if btn9!=0 and btn13!=0:
if i[0]=="<":
if num>=btn9:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn9:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn13:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn13:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn8=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton8["text"]=num
boton8["state"]=DISABLED
lista.append(8)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original9():
global num
global btn9
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[4]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[1]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==1 and i[2]==4:
if btn14!=0:
if i[0]=="v":
if num<=btn14:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn14:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn9=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton9["text"]=num
boton9["state"]=DISABLED
lista.append(9)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original10():
global num
global btn10
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[0]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[2]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==2 and i[2]==0:
if btn11!=0 and btn15!=0:
if i[0]=="<":
if num>=btn11:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn11:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn15:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn15:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn10=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton10["text"]=num
boton10["state"]=DISABLED
lista.append(10)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original11():
global num
global btn11
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[1]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[2]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==2 and i[2]==1:
if btn12!=0 and btn16!=0:
if i[0]=="<":
if num>=btn12:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn12:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn16:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn16:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn11=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton11["text"]=num
boton11["state"]=DISABLED
lista.append(11)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original12():
global num
global btn12
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[2]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[2]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==2 and i[2]==2:
if btn13!=0 and btn17!=0:
if i[0]=="<":
if num>=btn13:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn13:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn17:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn17:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn12=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton12["text"]=num
boton12["state"]=DISABLED
lista.append(12)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original13():
global num
global btn13
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[3]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[2]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==2 and i[2]==3:
if btn14!=0 and btn18!=0:
if i[0]=="<":
if num>=btn14:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn14:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn18:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn18:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn13=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton13["text"]=num
boton13["state"]=DISABLED
lista.append(13)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original14():
global num
global btn14
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[4]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[2]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==2 and i[2]==4:
if btn19!=0:
if i[0]=="v":
if num<=btn19:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn19:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn14=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton14["text"]=num
boton14["state"]=DISABLED
lista.append(14)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original15():
global num
global btn15
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[0]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[3]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==3 and i[2]==0:
if btn16!=0 and btn20!=0:
if i[0]=="<":
if num>=btn16:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn16:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn20:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn20:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn15=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton15["text"]=num
boton15["state"]=DISABLED
lista.append(15)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original16():
global num
global btn16
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[1]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[3]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==3 and i[2]==1:
if btn17!=0 and btn21!=0:
if i[0]=="<":
if num>=btn17:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn17:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn21:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn21:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn16=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton16["text"]=num
boton16["state"]=DISABLED
lista.append(16)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original17():
global num
global btn17
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[2]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[3]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==3 and i[2]==2:
if btn18!=0 and btn22!=0:
if i[0]=="<":
if num>=btn18:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn18:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn22:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn22:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn17=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton17["text"]=num
boton17["state"]=DISABLED
lista.append(17)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original18():
global num
global btn18
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[3]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[3]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==3 and i[2]==3:
if btn19!=0 and btn23!=0:
if i[0]=="<":
if num>=btn19:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn19:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
elif i[0]=="v":
if num<=btn23:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn23:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn18=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton18["text"]=num
boton18["state"]=DISABLED
lista.append(18)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original19():
global num
global btn19
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[4]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[3]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==3 and i[2]==4:
if btn24!=0:
if i[0]=="v":
if num<=btn24:
cumple=1
messagebox.showerror(message="numero no cumple con el signo v ")
elif i[0]=="˄":
if num>=btn24:
cumple=1
messagebox.showerror(message="numero no cumple con el signo ˄ ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn19=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton19["text"]=num
boton19["state"]=DISABLED
lista.append(19)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original20():
global num
global btn20
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[0]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[4]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==4 and i[2]==0:
if btn21!=0:
if i[0]=="<":
if num>=btn21:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn21:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn20=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton20["text"]=num
boton20["state"]=DISABLED
lista.append(20)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original21():
global num
global btn21
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[1]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[4]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==4 and i[2]==1:
if btn22!=0:
if i[0]=="<":
if num>=btn22:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn22:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn21=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton21["text"]=num
boton21["state"]=DISABLED
lista.append(21)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original22():
global num
global btn22
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[2]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[4]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==4 and i[2]==2:
if btn23!=0:
if i[0]=="<":
if num>=btn23:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn23:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn22=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton22["text"]=num
boton22["state"]=DISABLED
lista.append(22)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original23():
global num
global btn23
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[3]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[4]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==4 and i[2]==3:
if btn24!=0:
if i[0]=="<":
if num>=btn24:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num<=btn24:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn23=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton23["text"]=num
boton23["state"]=DISABLED
lista.append(23)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
def original24():
global num
global btn24
global cuadricula
global listaborrada
global sugerido
listaborrada=[]
cumple=0
if sugerido==0:
for i in cuadricula:
if num==i[4]:
cumple=1
messagebox.showerror(message="Este numero ya esta en esta columna")
if num in cuadricula[4]:
cumple=1
messagebox.showerror(message="Este numero ya está en la fila")
for i in plantilla:
if i[1]==4 and i[2]==3:
if btn23!=0:
if i[0]=="<":
if num<=btn23:
cumple=1
messagebox.showerror(message="numero no cumple con el signo < ")
elif i[0]==">":
if num>=btn23:
cumple=1
messagebox.showerror(message="numero no cumple con el signo > ")
if cumple==0:
if num=="1":
Boton0_num["bg"]="white"
elif num=="2":
Boton1_num["bg"]="white"
elif num=="3":
Boton2_num["bg"]="white"
elif num=="4":
Boton3_num["bg"]="white"
else:
Boton4_num["bg"]="white"
if num!="":
btn24=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton24["text"]=num
boton24["state"]=DISABLED
lista.append(24)
num=""
ganar(cuadricula)
else:
sugerencia()
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
            messagebox.showinfo(message=nuevalista)
    # clock
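    # reloj==1 -> count-up stopwatch, reloj==2 -> player-defined countdown, anything else -> no clock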
def parar():
global proceso
tiempo.after_cancel(proceso)
global Estado
if reloj==1:
Estado="normal"
proceso=0
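        # count-up clock: refreshes the h:m:s label every second and increments puntaje on each tick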
def iniciar(h=0, m=0, s=0):
global puntaje
puntaje+=1
global proceso
if s >= 60:
s=0
m=m+1
if m >= 60:
m=0
h=h+1
if h >= 24:
h=0
tiempo['text'] = str(h)+":"+str(m)+":"+str(s)
proceso=tiempo.after(1000, iniciar, (h), (m), (s+1))
tiempo= Label(Ventana_J, fg='black', width=10, font=("","18"))
tiempo.place(x=20,y=700)
iniciar()
elif reloj==2:
Estado=DISABLED
global sec
global minuto
global hora
sec=IntVar()
minuto=IntVar()
hora=IntVar()
sec=Entry(Ventana_J,width='10',textvariable=sec)
sec.place(x=160,y=698)
minuto=Entry(Ventana_J,width='10',textvariable=minuto)
minuto.place(x=90,y=698)
hora=Entry(Ventana_J,width='10',textvariable=hora)
hora.place(x=20,y=698)
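        # subir() (the "iniciar" button): re-enables every board widget, reads the h/m/s
        # entries and starts the countdown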
def subir():
global Estado
Estado="normal"
Boton1_J["state"]=Estado
Boton2_J["state"]=Estado
Boton3_J["state"]=Estado
Boton4_J["state"]=Estado
Boton5_J["state"]=Estado
Boton6_J["state"]=Estado
boton0["state"]=Estado
boton1["state"]=Estado
boton2["state"]=Estado
boton3["state"]=Estado
boton4["state"]=Estado
boton5["state"]=Estado
boton6["state"]=Estado
boton7["state"]=Estado
boton8["state"]=Estado
boton9["state"]=Estado
boton10["state"]=Estado
boton11["state"]=Estado
boton12["state"]=Estado
boton13["state"]=Estado
boton14["state"]=Estado
boton15["state"]=Estado
boton16["state"]=Estado
boton17["state"]=Estado
boton18["state"]=Estado
boton19["state"]=Estado
boton20["state"]=Estado
boton21["state"]=Estado
boton22["state"]=Estado
boton23["state"]=Estado
boton24["state"]=Estado
Boton0_num["state"]=Estado
Boton1_num["state"]=Estado
Boton2_num["state"]=Estado
Boton3_num["state"]=Estado
Boton4_num["state"]=Estado
Botonsugerencia["state"]=Estado
btn["state"]=DISABLED
secs=sec.get()
mins=minuto.get()
horas=hora.get()
proceso=0
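            # time's-up dialog: YES switches to the count-up clock and reloads the game via
            # guardar()/cargar(); NO closes the board and brings back the previous window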
def acabado():
Ventana_Z=Tk()
Ventana_Z.geometry('300x75+650+700')
Ventana_Z.title('Futoshiki')
Ventana_Z.config(bg='beige')
Ventana_Z.resizable(width= False, height=False)
Mensaje_tiempo=Label(Ventana_Z,text="Se acabó el tiempo, desea continuar?",font=("Arial",10),bg='beige')
Mensaje_tiempo.place(x=25,y=10)
def aceptarT():
global relojC
relojC=1
reloj=relojC
relojC=reloj
guardar(reloj)
Ventana_J.destroy()
Ventana_Z.destroy()
cargar()
def denegarT():
Ventana_Z.destroy()
Ventana_J.destroy()
Ventana_C.deiconify()
btsit=Button(Ventana_Z,text="SI",width='5',height='1',command=aceptarT)
btsit.place(x=10,y=30)
btnot=Button(Ventana_Z,text="NO",width='5',height='1',command=denegarT)
btnot.place(x=60,y=30)
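            # countdown tick: updates the label once per second, increments puntaje and
            # calls acabado() when the timer reaches 0:0:0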
def start(h, m, s):
global puntaje
puntaje+=1
global proceso
                if h==0 and m==0 and s==0:
                    acabado()
                    # stop rescheduling once the countdown hits zero
                    return
if s<1 and m<=0:
s=0
if m<1 and h==0:
m=0
elif m<1:
m=59
h-=1
if h<1:
h=0
elif s<1:
s=59
m-=1
tiempo_Up['text'] = str(h)+":"+str(m)+":"+str(s)
proceso=tiempo_Up.after(1000, start, (h), (m), (s-1))
tiempo_Up= Label(Ventana_J, fg='black', width=20, font=("","18"))
tiempo_Up.place(x=20,y=685)
start(int(horas),int(mins),int(secs))
btn = Button(Ventana_J, text='iniciar', bd='5',command=subir)
btn.place(x = 20,y = 718)
else:
Estado="normal"
    # creation of the board
listaBotones=[]
fila=170
columna=100
boton0=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original0)
listaBotones.append(boton0)
boton1=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original1)
listaBotones.append(boton1)
boton2=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original2)
listaBotones.append(boton2)
boton3=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original3)
listaBotones.append(boton3)
boton4=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original4)
listaBotones.append(boton4)
boton5=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original5)
listaBotones.append(boton5)
boton6=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original6)
listaBotones.append(boton6)
boton7=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original7)
listaBotones.append(boton7)
boton8=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original8)
listaBotones.append(boton8)
boton9=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original9)
listaBotones.append(boton9)
boton10=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original10)
listaBotones.append(boton10)
boton11=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original11)
listaBotones.append(boton11)
boton12=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original12)
listaBotones.append(boton12)
boton13=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original13)
listaBotones.append(boton13)
boton14=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original14)
listaBotones.append(boton14)
boton15=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original15)
listaBotones.append(boton15)
boton16=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original16)
listaBotones.append(boton16)
boton17=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original17)
listaBotones.append(boton17)
boton18=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original18)
listaBotones.append(boton18)
boton19=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original19)
listaBotones.append(boton19)
boton20=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original20)
listaBotones.append(boton20)
boton21=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original21)
listaBotones.append(boton21)
boton22=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original22)
listaBotones.append(boton22)
boton23=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original23)
listaBotones.append(boton23)
boton24=Button(Ventana_J,text=num,width='9',height='3',state=Estado,command=original24)
listaBotones.append(boton24)
boton=0
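    # cells that already hold a value from a resumed game are displayed and locked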
if btn0!=0:
boton0["text"]=btn0
boton0["state"]=DISABLED
if btn1!=0:
boton1["text"]=btn1
boton1["state"]=DISABLED
if btn2!=0:
boton2["text"]=btn2
boton2["state"]=DISABLED
if btn3!=0:
boton3["text"]=btn3
boton3["state"]=DISABLED
if btn4!=0:
boton4["text"]=btn4
boton4["state"]=DISABLED
if btn5!=0:
boton5["text"]=btn5
boton5["state"]=DISABLED
if btn6!=0:
boton6["text"]=btn6
boton6["state"]=DISABLED
if btn7!=0:
boton7["text"]=btn7
boton7["state"]=DISABLED
if btn8!=0:
boton8["text"]=btn8
boton8["state"]=DISABLED
if btn9!=0:
boton9["text"]=btn9
boton9["state"]=DISABLED
if btn10!=0:
boton10["text"]=btn10
boton10["state"]=DISABLED
if btn11!=0:
boton11["text"]=btn11
boton11["state"]=DISABLED
if btn12!=0:
boton12["text"]=btn12
boton12["state"]=DISABLED
if btn13!=0:
boton13["text"]=btn13
boton13["state"]=DISABLED
if btn14!=0:
boton14["text"]=btn14
boton14["state"]=DISABLED
if btn15!=0:
boton15["text"]=btn15
boton15["state"]=DISABLED
if btn16!=0:
boton16["text"]=btn16
boton16["state"]=DISABLED
if btn17!=0:
boton17["text"]=btn17
boton17["state"]=DISABLED
if btn18!=0:
boton18["text"]=btn18
boton18["state"]=DISABLED
if btn19!=0:
boton19["text"]=btn19
boton19["state"]=DISABLED
if btn20!=0:
boton20["text"]=btn20
boton20["state"]=DISABLED
if btn21!=0:
boton21["text"]=btn21
boton21["state"]=DISABLED
if btn22!=0:
boton22["text"]=btn22
boton22["state"]=DISABLED
if btn23!=0:
boton23["text"]=btn23
boton23["state"]=DISABLED
if btn24!=0:
boton24["text"]=btn24
boton24["state"]=DISABLED
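    # place the 25 buttons as a 5x5 grid: 90 px between columns, 80 px between rows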
for i in range(0,5):
for j in range(0,5):
listaBotones[boton].place(x=fila,y=columna)
boton+=1
fila+=90
columna+=80
fila=170
global plantilla
    # gets the puzzle template
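    # each template entry is a (symbol, row, column) tuple: digits are preset cells,
    # "<", ">", "v" and "˄" are the inequality signs between neighbouring cells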
if isinstance(dificultad,list):
plantilla=dificultad[0]
else:
a=open("futoshiki2020partidas.dat","rb")
plantilla=pickle.load(a)
a.close()
numT=random.randrange(0,len(plantilla))
numT2=dificultad-1
if numT2==0:
plantilla=plantilla[numT2][numT]
else:
plantilla=plantilla[numT2][0][numT]
    # draws the signs and adds the preset numbers
for i in plantilla:
if i[0].isdigit():
if i[1]==0 and i[2]==0:
boton0["text"]=i[0]
boton0["state"]=DISABLED
btn0=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==0 and i[2]==1:
boton1["text"]=i[0]
boton1["state"]=DISABLED
btn1=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==0 and i[2]==2:
boton2["text"]=i[0]
boton2["state"]=DISABLED
btn2=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==0 and i[2]==3:
boton3["text"]=i[0]
boton3["state"]=DISABLED
btn3=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==0 and i[2]==4:
boton4["text"]=i[0]
boton4["state"]=DISABLED
btn4=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==1 and i[2]==0:
boton5["text"]=i[0]
boton5["state"]=DISABLED
btn5=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==1 and i[2]==1:
boton6["text"]=i[0]
boton6["state"]=DISABLED
btn6=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==1 and i[2]==2:
boton7["text"]=i[0]
boton7["state"]=DISABLED
btn7=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==1 and i[2]==3:
boton8["text"]=i[0]
boton8["state"]=DISABLED
btn8=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==1 and i[2]==4:
boton9["text"]=i[0]
boton9["state"]=DISABLED
btn9=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==2 and i[2]==0:
boton10["text"]=i[0]
boton10["state"]=DISABLED
btn10=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==2 and i[2]==1:
boton11["text"]=i[0]
boton11["state"]=DISABLED
btn11=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==2 and i[2]==2:
boton12["text"]=i[0]
boton12["state"]=DISABLED
btn12=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==2 and i[2]==3:
boton13["text"]=i[0]
boton13["state"]=DISABLED
btn13=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==2 and i[2]==4:
boton14["text"]=i[0]
boton14["state"]=DISABLED
btn14=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==3 and i[2]==0:
boton15["text"]=i[0]
boton15["state"]=DISABLED
btn15=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==3 and i[2]==1:
boton16["text"]=i[0]
boton16["state"]=DISABLED
btn16=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==3 and i[2]==2:
boton17["text"]=i[0]
boton17["state"]=DISABLED
btn17=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==3 and i[2]==3:
boton18["text"]=i[0]
boton18["state"]=DISABLED
btn18=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==3 and i[2]==4:
boton19["text"]=i[0]
boton19["state"]=DISABLED
btn19=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==4 and i[2]==0:
boton20["text"]=i[0]
boton20["state"]=DISABLED
btn20=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==4 and i[2]==1:
boton21["text"]=i[0]
boton21["state"]=DISABLED
btn21=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==4 and i[2]==2:
boton22["text"]=i[0]
boton22["state"]=DISABLED
btn22=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==4 and i[2]==3:
boton23["text"]=i[0]
boton23["state"]=DISABLED
btn23=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif i[1]==4 and i[2]==4:
boton24["text"]=i[0]
boton24["state"]=DISABLED
btn24=i[0]
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
else:
if i[1]==0 and i[2]==0:
lbl1=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl1.place(x=200,y=156)
else:
lbl1.place(x=245,y=115)
elif i[1]==0 and i[2]==1:
lbl2=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl2.place(x=290,y=156)
else:
lbl2.place(x=335,y=115)
elif i[1]==0 and i[2]==2:
lbl3=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl3.place(x=380,y=156)
else:
lbl3.place(x=425,y=115)
elif i[1]==0 and i[2]==3:
lbl4=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl4.place(x=470,y=156)
else:
lbl4.place(x=515,y=115)
elif i[1]==0 and i[2]==4:
lbl5=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl5.place(x=560,y=156)
else:
lbl5.place(x=605,y=115)
elif i[1]==1 and i[2]==0:
lbl6=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl6.place(x=200,y=236)
else:
lbl6.place(x=240,y=195)
elif i[1]==1 and i[2]==1:
lbl7=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl7.place(x=290,y=236)
else:
lbl7.place(x=335,y=195)
elif i[1]==1 and i[2]==2:
lbl8=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl8.place(x=380,y=236)
else:
lbl8.place(x=425,y=195)
elif i[1]==1 and i[2]==3:
lbl9=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl9.place(x=470,y=236)
else:
lbl9.place(x=515,y=195)
elif i[1]==1 and i[2]==4:
lbl10=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl10.place(x=560,y=236)
else:
lbl10.place(x=605,y=195)
elif i[1]==2 and i[2]==0:
lbl11=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl11.place(x=200,y=316)
else:
lbl11.place(x=245,y=275)
elif i[1]==2 and i[2]==1:
lbl12=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl12.place(x=290,y=316)
else:
lbl12.place(x=335,y=275)
elif i[1]==2 and i[2]==2:
lbl13=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl13.place(x=380,y=316)
else:
lbl13.place(x=425,y=275)
elif i[1]==2 and i[2]==3:
lbl14=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl14.place(x=470,y=316)
else:
lbl14.place(x=515,y=275)
elif i[1]==2 and i[2]==4:
lbl15=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl15.place(x=560,y=316)
else:
lbl15.place(x=605,y=275)
elif i[1]==3 and i[2]==0:
lbl16=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl16.place(x=200,y=396)
else:
lbl16.place(x=245,y=355)
elif i[1]==3 and i[2]==1:
lbl17=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl17.place(x=290,y=396)
else:
lbl17.place(x=335,y=355)
elif i[1]==3 and i[2]==2:
lbl18=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl18.place(x=380,y=396)  # column 2 position; was x=470, which overlapped column 3 (copy-paste slip)
else:
lbl18.place(x=425,y=355)
elif i[1]==3 and i[2]==3:
lbl19=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl19.place(x=470,y=396)  # column 3 position; was x=380
else:
lbl19.place(x=515,y=435)
elif i[1]==3 and i[2]==4:
lbl20=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl20.place(x=560,y=396)
else:
lbl20.place(x=605,y=355)
elif i[1]==4 and i[2]==0:
lbl21=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl21.place(x=200,y=476)
else:
lbl21.place(x=245,y=435)
elif i[1]==4 and i[2]==1:
lbl22=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl22.place(x=290,y=476)
else:
lbl22.place(x=335,y=435)
elif i[1]==4 and i[2]==2:
lbl23=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl23.place(x=380,y=476)
else:
lbl23.place(x=425,y=435)
elif i[1]==4 and i[2]==3:
lbl24=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl24.place(x=470,y=476)
else:
lbl24.place(x=515,y=435)
elif i[1]==4 and i[2]==4:
lbl25=Label(Ventana_J,text=i[0],bg="beige")
if i[0]!=">" and i[0]!="<":
lbl25.place(x=560,y=476)
else:
lbl25.place(x=605,y=435)
# end-game button
def terminar():
global sugerido
if sugerido==1:
sugerencia()
Ventana_T=Tk()
Ventana_T.geometry('300x75+650+700')
Ventana_T.title('Futoshiki')
Ventana_T.config(bg='beige')
Ventana_T.resizable(width= False, height=False)
Mensaje_nombre=Label(Ventana_T,text="¿Está seguro de salir?",font=("Arial",10),bg='beige')
Mensaje_nombre.place(x=25,y=10)
def aceptar():
Ventana_J.destroy()
Ventana_T.destroy()
Ventana_C.deiconify()
def denegar():
Ventana_T.destroy()
btsi=Button(Ventana_T,text="SI",width='5',height='1',command=aceptar)
btsi.place(x=10,y=30)
btno=Button(Ventana_T,text="NO",width='5',height='1',command=denegar)
btno.place(x=60,y=30)
# save the game
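# Note (inferred from the pickle.dump call below): the saved record is the list
# [Nombre, reloj, lado, [plantilla, cuadricula, lista, nivelmult], continuo],
# i.e. the player name, the clock value, the side of the number column,
# the current board state, and the 'continuo' flag.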
def guardar(reloj):
global sugerido
if sugerido==1:
sugerencia()
filesize=os.path.getsize("futoshiki2020juegoactual.dat")
if filesize!=0:
file = open("futoshiki2020juegoactual.dat","r+")
file.truncate(0)
file.close()
d=open("futoshiki2020juegoactual.dat","wb")
pickle.dump([Nombre,reloj,lado,[plantilla,cuadricula,lista,nivelmult],continuo],d)
d.close()
# redo a move
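# Note (inferred from the code below): listaborrada stores the undone moves as
# (cell_index, value) pairs; redoing pops the last pair, restores the value into
# the matching btnN/botonN cell and appends the index back to lista.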
def rehacer():
global listaborrada
global lista
global num
global btn0
global btn1
global btn2
global btn3
global btn4
global btn5
global btn6
global btn7
global btn8
global btn9
global btn10
global btn11
global btn12
global btn13
global btn14
global btn15
global btn16
global btn17
global btn18
global btn19
global btn20
global btn21
global btn22
global btn23
global btn24
global cuadricula
global sugerido
if sugerido==1:
sugerencia()
if listaborrada==[]:
messagebox.showerror(message="Error, ya no hay jugadas borradas")
elif listaborrada[-1][0]==0:
btn0=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton0["text"]=num
boton0["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==1:
btn1=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton1["text"]=num
boton1["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==2:
btn2=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton2["text"]=num
boton2["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==3:
btn3=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton3["text"]=num
boton3["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==4:
btn4=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton4["text"]=num
boton4["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==5:
btn5=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton5["text"]=num
boton5["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==6:
btn6=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton6["text"]=num
boton6["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==7:
btn7=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton7["text"]=num
boton7["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==8:
btn8=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton8["text"]=num
boton8["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==9:
btn9=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton9["text"]=num
boton9["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==10:
btn10=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton10["text"]=num
boton10["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==11:
btn11=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton11["text"]=num
boton11["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==12:
btn12=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton12["text"]=num
boton12["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==13:
btn13=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton13["text"]=num
boton13["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==14:
btn14=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton14["text"]=num
boton14["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==15:
btn15=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton15["text"]=num
boton15["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==16:
btn16=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton16["text"]=num
boton16["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==17:
btn17=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton17["text"]=num
boton17["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==18:
btn18=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton18["text"]=num
boton18["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==19:
btn19=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton19["text"]=num
boton19["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==20:
btn20=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton20["text"]=num
boton20["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==21:
btn21=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton21["text"]=num
boton21["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==22:
btn22=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton22["text"]=num
boton22["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==23:
btn23=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton23["text"]=num
boton23["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
elif listaborrada[-1][0]==24:
btn24=listaborrada[-1][1]
lista.append(listaborrada[-1][0])
num=listaborrada[-1][1]
listaborrada=listaborrada[:-1]
boton24["text"]=num
boton24["state"]=DISABLED
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
# solve the futoshiki
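# Note (inferred from the loop below): the solver is a randomized restart search.
# On every pass it clears the grid, then fills cells 0..24 in order, drawing each
# value from the digits not yet used in that row/column and accepting it only if
# the originalCN check (row/column uniqueness plus the adjacent inequality signs)
# passes; when the grid ends up completely filled, ganarCOPY reports success and
# the loop stops.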
def solucionar():
global num
global btn0
global btn1
global btn2
global btn3
global btn4
global btn5
global btn6
global btn7
global btn8
global btn9
global btn10
global btn11
global btn12
global btn13
global btn14
global btn15
global btn16
global btn17
global btn18
global btn19
global btn20
global btn21
global btn22
global btn23
global btn24
global cuadricula
global gano
gano=False
def cerrarCOPY():
Ventana_J.destroy()
Ventana_C.deiconify()
def originalC0(num):
global btn0
global cuadricula
for i in cuadricula:
if num==i[0]:
return False
if num in cuadricula[0]:
return False
for i in plantilla:
if i[1]==0 and i[2]==0:
if btn1!=0 and btn5!=0:
if i[0]=="<":
if num>=btn1:
return False
elif i[0]==">":
if num<=btn1:
return False
elif i[0]=="v":
if num<=btn5:
return False
elif i[0]=="˄":
if num>=btn5:
return False
btn0=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton0["text"]=num
boton0["state"]=DISABLED
num=""
return True
def originalC1(num):
global btn1
global cuadricula
for i in cuadricula:
if num==i[1]:
return False
if num in cuadricula[0]:
return False
for i in plantilla:
if i[1]==0 and i[2]==1:
if btn2!=0 and btn6!=0:
if i[0]=="<":
if num>=btn2:
return False
elif i[0]==">":
if num<=btn2:
return False
elif i[0]=="v":
if num<=btn6:
return False
elif i[0]=="˄":
if num>=btn6:
return False
btn1=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton1["text"]=num
boton1["state"]=DISABLED
num=""
return True
def originalC2(num):
global btn2
global cuadricula
for i in cuadricula:
if num==i[2]:
return False
if num in cuadricula[0]:
return False
for i in plantilla:
if i[1]==0 and i[2]==2:
if btn3!=0 and btn7!=0:
if i[0]=="<":
if num>=btn3:
return False
elif i[0]==">":
if num<=btn3:
return False
elif i[0]=="v":
if num<=btn7:
return False
elif i[0]=="˄":
if num>=btn7:
return False
btn2=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton2["text"]=num
boton2["state"]=DISABLED
num=""
return True
def originalC3(num):
global btn3
global cuadricula
for i in cuadricula:
if num==i[3]:
return False
if num in cuadricula[0]:
return False
for i in plantilla:
if i[1]==0 and i[2]==3:
if btn4!=0 and btn8!=0:
if i[0]=="<":
if num>=btn4:  # compare against the right-hand neighbour btn4 (was btn3, a copy-paste slip)
return False
elif i[0]==">":
if num<=btn4:
return False
elif i[0]=="v":
if num<=btn8:
return False
elif i[0]=="˄":
if num>=btn8:
return False
btn3=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton3["text"]=num
boton3["state"]=DISABLED
num=""
return True
def originalC4(num):
global btn4
global cuadricula
for i in cuadricula:
if num==i[4]:
return False
if num in cuadricula[0]:
return False
for i in plantilla:
if i[1]==0 and i[2]==4:
if btn9!=0:
if i[0]=="v":
if num<=btn9:
return False
elif i[0]=="˄":
if num>=btn9:
return False
btn4=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton4["text"]=num
boton4["state"]=DISABLED
num=""
return True
def originalC5(num):
global btn5
global cuadricula
for i in cuadricula:
if num==i[0]:
return False
if num in cuadricula[1]:
return False
for i in plantilla:
if i[1]==1 and i[2]==0:
if btn6!=0 and btn10!=0:
if i[0]=="<":
if num>=btn6:
return False
elif i[0]==">":
if num<=btn6:
return False
elif i[0]=="v":
if num<=btn10:
return False
elif i[0]=="˄":
if num>=btn10:
return False
btn5=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton5["text"]=num
boton5["state"]=DISABLED
num=""
return True
def originalC6(num):
global btn6
global cuadricula
for i in cuadricula:
if num==i[1]:
return False
if num in cuadricula[1]:
return False
for i in plantilla:
if i[1]==1 and i[2]==1:
if btn7!=0 and btn11!=0:
if i[0]=="<":
if num>=btn7:
return False
elif i[0]==">":
if num<=btn7:
return False
elif i[0]=="v":
if num<=btn11:
return False
elif i[0]=="˄":
if num>=btn11:
return False
btn6=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton6["text"]=num
boton6["state"]=DISABLED
num=""
return True
def originalC7(num):
global btn7
global cuadricula
for i in cuadricula:
if num==i[2]:
return False
if num in cuadricula[1]:
return False
for i in plantilla:
if i[1]==1 and i[2]==2:
if btn8!=0 and btn12!=0:
if i[0]=="<":
if num>=btn8:
return False
elif i[0]==">":
if num<=btn8:
return False
elif i[0]=="v":
if num<=btn12:
return False
elif i[0]=="˄":
if num>=btn12:
return False
btn7=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton7["text"]=num
boton7["state"]=DISABLED
num=""
return True
def originalC8(num):
global btn8
global cuadricula
for i in cuadricula:
if num==i[3]:
return False
if num in cuadricula[1]:
return False
for i in plantilla:
if i[1]==1 and i[2]==3:
if btn9!=0 and btn13!=0:
if i[0]=="<":
if num>=btn9:
return False
elif i[0]==">":
if num<=btn9:
return False
elif i[0]=="v":
if num<=btn13:
return False
elif i[0]=="˄":
if num>=btn13:
return False
btn8=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton8["text"]=num
boton8["state"]=DISABLED
num=""
return True
def originalC9(num):
global btn9
global cuadricula
for i in cuadricula:
if num==i[4]:
return False
if num in cuadricula[1]:
return False
for i in plantilla:
if i[1]==1 and i[2]==4:
if btn14!=0:
if i[0]=="v":
if num<=btn14:
return False
elif i[0]=="˄":
if num>=btn14:
return False
btn9=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton9["text"]=num
boton9["state"]=DISABLED
num=""
return True
def originalC10(num):
global btn10
global cuadricula
for i in cuadricula:
if num==i[0]:
return False
if num in cuadricula[2]:
return False
for i in plantilla:
if i[1]==2 and i[2]==0:
if btn11!=0 and btn15!=0:
if i[0]=="<":
if num>=btn11:
return False
elif i[0]==">":
if num<=btn11:
return False
elif i[0]=="v":
if num<=btn15:
return False
elif i[0]=="˄":
if num>=btn15:
return False
btn10=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton10["text"]=num
boton10["state"]=DISABLED
num=""
return True
def originalC11(num):
global btn11
global cuadricula
for i in cuadricula:
if num==i[1]:
return False
if num in cuadricula[2]:
return False
for i in plantilla:
if i[1]==2 and i[2]==1:
if btn12!=0 and btn16!=0:
if i[0]=="<":
if num>=btn12:
return False
elif i[0]==">":
if num<=btn12:
return False
elif i[0]=="v":
if num<=btn16:
return False
elif i[0]=="˄":
if num>=btn16:
return False
btn11=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton11["text"]=num
boton11["state"]=DISABLED
num=""
return True
def originalC12(num):
global btn12
global cuadricula
for i in cuadricula:
if num==i[2]:
return False
if num in cuadricula[2]:
return False
for i in plantilla:
if i[1]==2 and i[2]==2:
if btn13!=0 and btn17!=0:
if i[0]=="<":
if num>=btn13:
return False
elif i[0]==">":
if num<=btn13:
return False
elif i[0]=="v":
if num<=btn17:
return False
elif i[0]=="˄":
if num>=btn17:
return False
btn12=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton12["text"]=num
boton12["state"]=DISABLED
num=""
return True
def originalC13(num):
global btn13
global cuadricula
for i in cuadricula:
if num==i[3]:
return False
if num in cuadricula[2]:
return False
for i in plantilla:
if i[1]==2 and i[2]==3:
if btn14!=0 and btn18!=0:
if i[0]=="<":
if num>=btn14:
return False
elif i[0]==">":
if num<=btn14:
return False
elif i[0]=="v":
if num<=btn18:
return False
elif i[0]=="˄":
if num>=btn18:
return False
btn13=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton13["text"]=num
boton13["state"]=DISABLED
num=""
return True
def originalC14(num):
global btn14
global cuadricula
for i in cuadricula:
if num==i[4]:
return False
if num in cuadricula[2]:
return False
for i in plantilla:
if i[1]==2 and i[2]==4:
if btn19!=0:
if i[0]=="v":
if num<=btn19:
return False
elif i[0]=="˄":
if num>=btn19:
return False
btn14=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton14["text"]=num
boton14["state"]=DISABLED
num=""
return True
def originalC15(num):
global btn15
global cuadricula
for i in cuadricula:
if num==i[0]:
return False
if num in cuadricula[3]:
return False
for i in plantilla:
if i[1]==3 and i[2]==0:
if btn16!=0 and btn20!=0:
if i[0]=="<":
if num>=btn16:
return False
elif i[0]==">":
if num<=btn16:
return False
elif i[0]=="v":
if num<=btn20:
return False
elif i[0]=="˄":
if num>=btn20:
return False
btn15=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton15["text"]=num
boton15["state"]=DISABLED
num=""
return True
def originalC16(num):
global btn16
global cuadricula
for i in cuadricula:
if num==i[1]:
return False
if num in cuadricula[3]:
return False
for i in plantilla:
if i[1]==3 and i[2]==1:
if btn17!=0 and btn21!=0:
if i[0]=="<":
if num>=btn17:
return False
elif i[0]==">":
if num<=btn17:
return False
elif i[0]=="v":
if num<=btn21:
return False
elif i[0]=="˄":
if num>=btn21:
return False
btn16=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton16["text"]=num
boton16["state"]=DISABLED
num=""
return True
def originalC17(num):
global btn17
global cuadricula
for i in cuadricula:
if num==i[2]:
return False
if num in cuadricula[3]:
return False
for i in plantilla:
if i[1]==3 and i[2]==2:
if btn18!=0 and btn22!=0:
if i[0]=="<":
if num>=btn18:
return False
elif i[0]==">":
if num<=btn18:
return False
elif i[0]=="v":
if num<=btn22:
return False
elif i[0]=="˄":
if num>=btn22:
return False
btn17=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton17["text"]=num
boton17["state"]=DISABLED
num=""
return True
def originalC18(num):
global btn18
global cuadricula
for i in cuadricula:
if num==i[3]:
return False
if num in cuadricula[3]:
return False
for i in plantilla:
if i[1]==3 and i[2]==3:
if btn19!=0 and btn23!=0:
if i[0]=="<":
if num>=btn19:  # compare against the right-hand neighbour btn19 (was btn18, a copy-paste slip)
return False
elif i[0]==">":
if num<=btn19:
return False
elif i[0]=="v":
if num<=btn23:
return False
elif i[0]=="˄":
if num>=btn23:
return False
btn18=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton18["text"]=num
boton18["state"]=DISABLED
num=""
return True
def originalC19(num):
global btn19
global cuadricula
for i in cuadricula:
if num==i[4]:
return False
if num in cuadricula[3]:
return False
for i in plantilla:
if i[1]==3 and i[2]==4:
if btn24!=0:
if i[0]=="v":
if num<=btn24:
return False
elif i[0]=="˄":
if num>=btn24:
return False
btn19=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton19["text"]=num
boton19["state"]=DISABLED
num=""
return True
def originalC20(num):
global btn20
global cuadricula
for i in cuadricula:
if num==i[0]:
return False
if num in cuadricula[4]:
return False
for i in plantilla:
if i[1]==4 and i[2]==0:
if btn21!=0:
if i[0]=="<":
if num>=btn21:
return False
elif i[0]==">":
if num<=btn21:
return False
btn20=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton20["text"]=num
boton20["state"]=DISABLED
num=""
return True
def originalC21(num):
global btn21
global cuadricula
for i in cuadricula:
if num==i[1]:
return False
if num in cuadricula[4]:
return False
for i in plantilla:
if i[1]==4 and i[2]==1:
if btn22!=0:
if i[0]==">":
if num<=btn22:
return False
elif i[0]=="<":
if num>=btn22:
return False
btn21=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton21["text"]=num
boton21["state"]=DISABLED
num=""
return True
def originalC22(num):
global btn22
global cuadricula
for i in cuadricula:
if num==i[2]:
return False
if num in cuadricula[4]:
return False
for i in plantilla:
if i[1]==4 and i[2]==2:
if btn23!=0:
if i[0]=="<":
if num>=btn23:
return False
elif i[0]==">":
if num<=btn23:
return False
btn22=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton22["text"]=num
boton22["state"]=DISABLED
num=""
return True
def originalC23(num):
global btn23
global cuadricula
for i in cuadricula:
if num==i[3]:
return False
if num in cuadricula[4]:
return False
for i in plantilla:
if i[1]==4 and i[2]==3:
if btn24!=0:
if i[0]=="<":
if num>=btn24:
return False
elif i[0]==">":
if num<=btn24:
return False
btn23=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton23["text"]=num
boton23["state"]=DISABLED
num=""
return True
def originalC24(num):
global btn24
global cuadricula
for i in cuadricula:
if num==i[4]:
return False
if num in cuadricula[4]:
return False
btn24=num
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
boton24["text"]=num
boton24["state"]=DISABLED
num=""
return True
def ganarCOPY(cuadricula):
ceros=0
for i in cuadricula:
for j in i:
if j==0 or j=="0" or j=="":
ceros+=1
if ceros==0:
return True
else:
return False
while gano==False:
btn0=0
btn1=0
btn2=0
btn3=0
btn4=0
btn5=0
btn6=0
btn7=0
btn8=0
btn9=0
btn10=0
btn11=0
btn12=0
btn13=0
btn14=0
btn15=0
btn16=0
btn17=0
btn18=0
btn19=0
btn20=0
btn21=0
btn22=0
btn23=0
btn24=0
cuadricula=[[btn0,btn1,btn2,btn3,btn4],[btn5,btn6,btn7,btn8,btn9],[btn10,btn11,btn12,btn13,btn14],[btn15,btn16,btn17,btn18,btn19],[btn20,btn21,btn22,btn23,btn24]]
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC0(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC1(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC2(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC3(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[0]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC4(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC5(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC6(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC7(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC8(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[1]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC9(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC10(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC11(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC12(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC13(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[2]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC14(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC15(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC16(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC17(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC18(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[3]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC19(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[0])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC20(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[1])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC21(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[2])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC22(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[3])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC23(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
listadenumeros=["1","2","3","4","5"]
nuevalista=[]
listavertical=[]
for i in cuadricula:
listavertical.append(i[4])
listahorizontal=cuadricula[4]
for j in listadenumeros:
if (j not in listavertical) and (j not in listahorizontal) :
nuevalista.append(j)
if nuevalista!=[]:
num=random.choice(nuevalista)
else:
num=""
while originalC24(num)!=True and nuevalista!=[]:
nuevalista.remove(num)
num=random.choice(nuevalista)
gano=ganarCOPY(cuadricula)
Boton1_J["state"]=DISABLED
Boton2_J["state"]=DISABLED
Boton3_J["state"]=DISABLED
Boton4_J["state"]=DISABLED
Boton5_J["state"]=DISABLED
Boton6_J["state"]=DISABLED
Boton0_num["state"]=DISABLED
Boton1_num["state"]=DISABLED
Boton2_num["state"]=DISABLED
Boton3_num["state"]=DISABLED
Boton4_num["state"]=DISABLED
Botonsugerencia["state"]=DISABLED
Mensaje_ganar=Message(Ventana_J,text="JUEGO TERMINADO CON ÉXITO",width='885',font=("Comic Sans",40),bg="#C2D8FB",fg="black")
Mensaje_ganar.place(x=10,y=475)
btfin=Button(Ventana_J,text="FIN",width='11',height='3',command=cerrarCOPY)
btfin.place(x=300,y=200)
# toggle showing the possible moves for a cell
def sugerencia():
global sugerido
if sugerido==0:
sugerido=1
Botonsugerencia["text"]="Posibles Jugadas: Encendido"
Botonsugerencia["bg"]="green"
Boton0_num["state"]=DISABLED
Boton1_num["state"]=DISABLED
Boton2_num["state"]=DISABLED
Boton3_num["state"]=DISABLED
Boton4_num["state"]=DISABLED
else:
sugerido=0
Botonsugerencia["text"]="Posibles Jugadas: Apagado"
Botonsugerencia["bg"]="white"
Boton0_num["state"]="normal"
Boton1_num["state"]="normal"
Boton2_num["state"]="normal"
Boton3_num["state"]="normal"
Boton4_num["state"]="normal"
# buttons
Boton1_J=Button(Ventana_J,text="Borrar Jugada",width='13',height='3',font=("Arial",15),bg='#EF9A54',fg="black",state=Estado,command=anterior)
Boton1_J.place(x=25,y=520)
Boton2_J=Button(Ventana_J,text="Borrar Juego",width='13',height='3',font=("Arial",15),bg='grey',fg="black",state=Estado,command=borra_todo)
Boton2_J.place(x=225,y=520)
Boton3_J=Button(Ventana_J,text="Terminar Juego",width='13',height='3',font=("Arial",15),bg='#C50000',fg="black",state=Estado,command=terminar)
Boton3_J.place(x=425,y=520)
Boton4_J=Button(Ventana_J,text="Guardar Juego",width='13',height='3',font=("Arial",15),bg='#8E9CFF',fg="black",state=Estado,command=lambda:guardar(relojC))
Boton4_J.place(x=625,y=520)
Boton5_J=Button(Ventana_J,text="Rehacer Jugada",width='13',height='3',font=("Arial",15),bg='#BFE1BB',fg="black",state=Estado,command=rehacer)
Boton5_J.place(x=425,y=620)
Boton6_J=Button(Ventana_J,text="Solucionar Juego",width='13',height='3',font=("Arial",15),bg='grey',fg="black",state=Estado,command=solucionar)
Boton6_J.place(x=625,y=620)
# column of number buttons
if lado==2:
Boton0_num=Button(Ventana_J,text=uno,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(uno))
Boton0_num.place(x=700,y=170)
Boton1_num=Button(Ventana_J,text=dos,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(dos))
Boton1_num.place(x=700,y=220)
Boton2_num=Button(Ventana_J,text=tres,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(tres))
Boton2_num.place(x=700,y=270)
Boton3_num=Button(Ventana_J,text=cuatro,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(cuatro))
Boton3_num.place(x=700,y=320)
Boton4_num=Button(Ventana_J,text=cinco,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(cinco))
Boton4_num.place(x=700,y=370)
Botonsugerencia=Button(Ventana_J,text="Posibles Jugadas: Apagado",width='23',height='2',font=("Arial",8),bg='white',fg="black",state=Estado,command=sugerencia)
Botonsugerencia.place(x=10,y=270)
else:
Boton0_num=Button(Ventana_J,text=uno,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(uno))
Boton0_num.place(x=50,y=170)
Boton1_num=Button(Ventana_J,text=dos,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(dos))
Boton1_num.place(x=50,y=220)
Boton2_num=Button(Ventana_J,text=tres,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(tres))
Boton2_num.place(x=50,y=270)
Boton3_num=Button(Ventana_J,text=cuatro,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(cuatro))
Boton3_num.place(x=50,y=320)
Boton4_num=Button(Ventana_J,text=cinco,width='3',height='1',font=("Arial",15),bg='white',fg="black",state=Estado,command=lambda:numero(cinco))
Boton4_num.place(x=50,y=370)
Botonsugerencia=Button(Ventana_J,text="Posibles Jugadas: Apagado",width='23',height='2',font=("Arial",8),bg='white',fg="black",state=Estado,command=sugerencia)
Botonsugerencia.place(x=630,y=270)
Ventana_J.mainloop()
# load the saved game
def cargar():
filesize=os.path.getsize("futoshiki2020juegoactual.dat")
if filesize==0:
messagebox.showerror(message="Error,no hay partida guardadas")
else:
x=open("futoshiki2020juegoactual.dat","rb")
cosa=pickle.load(x)
x.close()
jugar(cosa[0],cosa[1],cosa[2],cosa[3],cosa[4])
# show the top 10
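# Note (inferred from the code below): the records are unpickled from
# futoshiki2020top10.dat, kept as tuples (apparently (name, time)) sorted by time,
# rendered as labels in Ventana_Top and also written to top10futoshiki2020.pdf via FPDF.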
def top():
n=0
j=25
p=10
q=open("futoshiki2020top10.dat","rb")
top10=pickle.load(q)
q.close()
if len(top10)>10:
top10=top10[0:10]
listatemporal=[]
nuevotop10=[]
for i in top10:
if isinstance(i,tuple):
listatemporal.append(i[1])
else:
top10.remove(i)
listatemporal.sort()
while listatemporal!=[]:
for i in top10:
if isinstance(i,tuple):
if i[1]==listatemporal[0]:
nuevotop10.append(i)
listatemporal=listatemporal[1:]
if listatemporal==[]:
break
else:
top10.remove(i)
top10=nuevotop10
Ventana_Top=Tk()
Ventana_Top.geometry('500x500+200+300')
Ventana_Top.title('Futoshiki')
Ventana_Top.config(bg='beige')
Ventana_Top.resizable(width= False, height=False)
w=10
pdf=FPDF()
pdf.add_page()
pdf.set_font("Arial","B",24)
for i in top10:
if isinstance(i,int):
file = open("futoshiki2020top10.dat","r+")
file.truncate(0)
file.close()
d=open("futoshiki2020top10.dat","wb")
pickle.dump(top10,d)
d.close()
else:
n+=1
Mensaje_Top=Label(Ventana_Top,font=("Arial",10),bg='beige')
cosa=str(n)+") "+str(i[0])+" "+str(i[1])
Mensaje_Top["text"]=cosa
Mensaje_Top.place(x=j,y=p)
pdf.cell(10,w,cosa)
w+=18
j+=25
p+=25
pdf.output("top10futoshiki2020.pdf","F")
def ok():
Ventana_Top.destroy()
def imprime():
os.startfile("top10futoshiki2020.pdf")
btpdf=Button(Ventana_Top,text="imprimir",width='7',height='2',command=imprime)
btpdf.place(x=100,y=450)
btok=Button(Ventana_Top,text="ok",width='7',height='2',command=ok)
btok.place(x=300,y=450)
# buttons
Boton1=Button(Ventana_C,text="Iniciar Juego",width='11',height='3',font=("Arial",15),bg='red',fg="black",command=lambda:jugar(Nombre_text.get(),var.get(),var2.get(),var3.get(),continuo))
Boton1.place(x=160,y=300)
Boton2=Button(Ventana_C,text="Cargar Juego",width='11',height='3',font=("Arial",15),bg='#A1FBD5',fg="black",command=cargar)
Boton2.place(x=310,y=300)
Boton3=Button(Ventana_C,text="Top 10",width='11',height='3',font=("Arial",15),bg='#DBEB1C',fg="black",command=top)
Boton3.place(x=460,y=300)
def manual():# open the user manual
os.startfile("Esteban_Granda_Urbina_manual_de_usuario_futoshiki_v2.pdf")
Boton4=Button(Ventana_C,text="Ayuda",width='11',height='3',font=("Arial",15),bg='grey',fg="black",command=manual)
Boton4.place(x=160,y=400)
def info():# show program information
messagebox.showinfo("Acerca de","""
Nombre del programa: Futoshiki
Version: 1.0.0
Fecha de creación: 20/07/20
Autor: <NAME>
""")
Boton5=Button(Ventana_C,text="Acerca de",width='11',height='3',font=("Arial",15),bg='grey',fg="black",command=info)
Boton5.place(x=310,y=400)
def cerrar():# close the window
Ventana_C.destroy()
Boton6=Button(Ventana_C,text="Salir",width='11',height='3',font=("Arial",15),bg='grey',fg="black",command=cerrar)
Boton6.place(x=460,y=400)
Ventana_C.mainloop()
VentanaPrincipal()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 11 17:10:49 2020
@author: <NAME>
In this code a Hamiltonian Neural Network is designed and employed
to solve the system of four differential equations obtained from Hamilton's
equations for the Hamiltonian of the Henon-Heiles chaotic dynamical system.
"""
import numpy as np
import torch
import torch.optim as optim
from torch.autograd import grad
import matplotlib.pyplot as plt
import time
import copy
from scipy.integrate import odeint
dtype=torch.float
# Define the sin() activation function
class mySin(torch.nn.Module):
@staticmethod
def forward(input):
return torch.sin(input)
# Use below in the Scipy Solver
def f(u, t ,lam=1):
x, y, px, py = u # unpack current values of u
derivs = [px, py, -x -2*lam*x*y, -y -lam*(x**2-y**2) ] # list of dy/dt=f functions
return derivs
# Scipy Solver
def HHsolution(N,t, x0, y0, px0, py0,lam=1):
u0 = [x0, y0, px0, py0]
# Call the ODE solver
solPend = odeint(f, u0, t, args=(lam,))
xP = solPend[:,0]; yP = solPend[:,1];
pxP = solPend[:,2]; pyP = solPend[:,3]
return xP,yP, pxP, pyP
# Energy of the Henon-Heiles system (nonlinear oscillator)
def energy(x, y, px, py, lam=1):
Nx=len(x);
x=x.reshape(Nx); y=y.reshape(Nx)
px=px.reshape(Nx); py=py.reshape(Nx)
E = 0.5*(px**2 + py**2) + 0.5*(x**2+y**2)+lam*(x**2 *y - y**3/3)
E = E.reshape(Nx)
return E
# initial energy
def HH_exact(N,x0, y0, vx0, vy0, lam):
E0 = 0.5*(vx0**2+vy0**2) + 0.5*(x0**2+y0**2)+lam*(x0**2 *y0 - y0**3/3)
E_ex = E0*np.ones(N+1);
return E0, E_ex
# Set the initial state. lam controls the nonlinearity
x0, y0, px0, py0, lam = 0.3,-0.3, 0.3, 0.15, 1;
t0, t_max, N = 0.,6*np.pi, 200; dt = t_max/N;
X0 = [t0, x0, y0, px0, py0, lam]
t_num = np.linspace(t0, t_max, N+1)
E0, E_ex = HH_exact(N,x0, y0, px0, py0, lam)
x_num, y_num, px_num, py_num = HHsolution(N,t_num, x0, y0, px0, py0, lam)
# E_num = energy( x_ex, y_ex, vx_ex, vy_ex, lam)
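# Optional sanity check (added here as a sketch, not part of the original script):
# the energy of the odeint reference trajectory should stay close to E0.
E_num = energy(x_num, y_num, px_num, py_num, lam)
# print('max |E_num - E0| =', np.max(np.abs(E_num - E0)))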
#####################################
# Hamiltonian Neural Network
####################################
# Define some more general functions
def dfx(x,f):
# Calculate the derivative with automatic differentiation
return grad([f], [x], grad_outputs=torch.ones(x.shape, dtype=dtype), create_graph=True)[0]
def perturbPoints(grid,t0,tf,sig=0.5):
# stochastic perturbation of the evaluation points
# force t[0]=t0 & force points to be in the t-interval
delta_t = grid[1] - grid[0]
noise = delta_t * torch.randn_like(grid)*sig
t = grid + noise
t.data[2] = torch.ones(1,1)*(-1)
t.data[t<t0]=t0 - t.data[t<t0]
t.data[t>tf]=2*tf - t.data[t>tf]
t.data[0] = torch.ones(1,1)*t0
t.requires_grad = False
return t
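# Note: perturbPoints jitters the training grid with Gaussian noise of width
# sig*delta_t, reflects any point that falls outside [t0, tf] back into the
# interval, and pins t[0] back to t0 so the initial-condition point is always kept.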
def saveData(path, t, x,y, px,py, E, loss):
np.savetxt(path+"t.txt",t)
np.savetxt(path+"x.txt",x)
np.savetxt(path+"y.txt",y)
np.savetxt(path+"px.txt",px)
np.savetxt(path+"py.txt",py)
np.savetxt(path+"E.txt",E)
np.savetxt(path+"Loss.txt",loss)
# Define some functions used by the Hamiltonian network
def parametricSolutions(t, nn, X0):
# parametric solutions
t0, x0, y0, px0, py0, lam = X0[0],X0[1],X0[2],X0[3],X0[4],X0[5]
N1, N2, N3, N4 = nn(t)
dt =t-t0
#### THERE ARE TWO PARAMETRIC SOLUTIONS. Uncomment f=dt
f = (1-torch.exp(-dt))
# f=dt
x_hat = x0 + f*N1
y_hat = y0 + f*N2
px_hat = px0 + f*N3
py_hat = py0 + f*N4
return x_hat, y_hat, px_hat, py_hat
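# Note: the trial solutions above have the form x_hat = x0 + f(t)*N1 with
# f(t) = 1 - exp(-(t - t0)) (or f = t - t0 if the alternative line is used), so
# f(t0) = 0 and the initial conditions X0 are satisfied exactly by construction,
# leaving the network outputs N1..N4 free to fit only the dynamics.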
def hamEqs_Loss(t,x,y,px,py,lam):
# Define the loss function from Hamilton's equations, writing the equations explicitly
xd,yd,pxd,pyd= dfx(t,x),dfx(t,y),dfx(t,px),dfx(t,py)
fx = xd - px;
fy = yd - py;
fpx = pxd + x + 2.*lam*x*y
fpy = pyd + y + lam*(x.pow(2) - y.pow(2))
Lx = (fx.pow(2)).mean(); Ly = (fy.pow(2)).mean();
Lpx = (fpx.pow(2)).mean(); Lpy = (fpy.pow(2)).mean();
L = Lx + Ly + Lpx + Lpy
return L
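# Note: hamEqs_Loss is the mean squared residual of the four Hamilton equations,
# L = <(dx/dt - px)^2> + <(dy/dt - py)^2>
#     + <(dpx/dt + x + 2*lam*x*y)^2> + <(dpy/dt + y + lam*(x^2 - y^2))^2>,
# which vanishes exactly on a true trajectory of the system.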
def hamEqs_Loss_byH(t,x,y,px,py,lam):
# This is an alternative way to define the loss function:
# Define the loss function by Hamilton Eqs. directly from Hamiltonian H
#
# Potential and Kinetic Energy
V = 0.5*(x.pow(2) + y.pow(2)) + lam*(x.pow(2)*y - y.pow(3)/3)
K = 0.5*(px.pow(2)+py.pow(2))
ham = K + V
xd,yd,pxd,pyd= dfx(t,x),dfx(t,y),dfx(t,px),dfx(t,py)
# calculate the partial spatial derivatives of H
hx = grad([ham], [x], grad_outputs=torch.ones(x.shape, dtype=dtype), create_graph=True)[0]
hy = grad([ham], [y], grad_outputs=torch.ones(y.shape, dtype=dtype), create_graph=True)[0]
hpx = grad([ham], [px], grad_outputs=torch.ones(px.shape, dtype=dtype), create_graph=True)[0]
hpy = grad([ham], [py], grad_outputs=torch.ones(py.shape, dtype=dtype), create_graph=True)[0]
# Hamilton Eqs
fx = xd - hpx; fy = yd - hpy
fpx = pxd + hx; fpy = pyd + hy
Lx = (fx.pow(2)).mean(); Ly = (fy.pow(2)).mean();
Lpx = (fpx.pow(2)).mean(); Lpy = (fpy.pow(2)).mean();
L = Lx + Ly + Lpx + Lpy
return L
def hamiltonian_Loss(t,x,y,px,py,lam):
# Define the loss function as the time derivative of the hamiltonian
xd,yd,pxd,pyd= dfx(t,x),dfx(t,y),dfx(t,px),dfx(t,py)
ham = 0.5*(px.pow(2)+py.pow(2)+x.pow(2)+y.pow(2))+lam*(x.pow(2)*y-y.pow(3)/3)
hx = grad([ham], [x], grad_outputs=torch.ones(x.shape, dtype=dtype), create_graph=True)[0]
hy = grad([ham], [y], grad_outputs=torch.ones(y.shape, dtype=dtype), create_graph=True)[0]
hpx = grad([ham], [px], grad_outputs=torch.ones(px.shape, dtype=dtype), create_graph=True)[0]
hpy = grad([ham], [py], grad_outputs=torch.ones(py.shape, dtype=dtype), create_graph=True)[0]
ht = hx*xd + hy*yd + hpx*pxd + hpy*pyd
L = (ht.pow(2)).mean()
return L
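# Since H is conserved along exact trajectories, ht = dH/dt vanishes for a true solution;
# this loss therefore penalizes violations of energy conservation by the network solution.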
# NETWORK ARCHITECTURE
# A two-hidden-layer NN with one input (t) and four outputs (x, y, px, py)
class odeNet_HH_MM(torch.nn.Module):
def __init__(self, D_hid=10):
super(odeNet_HH_MM,self).__init__()
# Define the Activation
# self.actF = torch.nn.Sigmoid()
self.actF = mySin()
# define layers
self.Lin_1 = torch.nn.Linear(1, D_hid)
self.Lin_2 = torch.nn.Linear(D_hid, D_hid)
self.Lin_out = torch.nn.Linear(D_hid, 4)
def forward(self,t):
# layer 1
l = self.Lin_1(t); h = self.actF(l)
# layer 2
l = self.Lin_2(h); h = self.actF(l)
# output layer
r = self.Lin_out(h)
xN = (r[:,0]).reshape(-1,1); yN = (r[:,1]).reshape(-1,1)
pxN = (r[:,2]).reshape(-1,1); pyN = (r[:,3]).reshape(-1,1)
return xN, yN, pxN, pyN
# Train the NN
def run_odeNet_HH_MM(X0, tf, neurons, epochs, n_train,lr,
minibatch_number = 1):
fc0 = odeNet_HH_MM(neurons)
fc1=0; # fc1 will be a deepcopy of the network with the lowest training loss
# optimizer
betas = [0.999, 0.9999]
optimizer = optim.Adam(fc0.parameters(), lr=lr, betas=betas)
Loss_history = []; Llim = 1
t0=X0[0];
grid = torch.linspace(t0, tf, n_train).reshape(-1,1)
## TRAINING ITERATION
TeP0 = time.time()
for tt in range(epochs):
# Perturbing the evaluation points & forcing t[0]=t0
t=perturbPoints(grid,t0,tf,sig=.03*tf)
# BATCHING
batch_size = int(n_train/minibatch_number)
batch_start, batch_end = 0, batch_size
idx = np.random.permutation(n_train)
t_b = t[idx]
t_b.requires_grad = True
loss=0.0
for nbatch in range(minibatch_number):
# batch time set
t_mb = t_b[batch_start:batch_end]
# Network solutions
x,y,px,py =parametricSolutions(t_mb,fc0,X0)
# LOSS
            # Loss function defined by Hamilton's Eqs. (symplectic): writing the equations explicitly (faster)
Ltot = hamEqs_Loss(t_mb,x,y,px,py,lam)
            # Loss function defined by Hamilton's Eqs. (symplectic): computing the equations with auto-diff (slower)
# Ltot = hamEqs_Loss_byH(t_mb,x,y,px,py,lam)
# Alternatively, Loss function defined by Hamiltonian (slower)
# if tt>1e3:
# Ltot += hamiltonian_Loss(t_mb,x,y,px,py,lam)
# OPTIMIZER
Ltot.backward(retain_graph=False); #True
optimizer.step(); loss += Ltot.data.numpy()
optimizer.zero_grad()
batch_start +=batch_size
batch_end +=batch_size
# keep the loss function history
Loss_history.append(loss)
#Keep the best model (lowest loss) by using a deep copy
if tt > 0.8*epochs and Ltot < Llim:
fc1 = copy.deepcopy(fc0)
Llim=Ltot
TePf = time.time()
runTime = TePf - TeP0
return fc1, Loss_history, runTime
###
## TRAIN THE NETWORK
n_train, neurons, epochs, lr,mb = 100, 50, int(3e4), 8e-3, 1
model,loss,runTime = run_odeNet_HH_MM(X0, t_max,
neurons, epochs, n_train,lr,mb)
# Loss function
print('Training time (minutes):', runTime/60)
plt.loglog(loss,'-b',alpha=0.975);
plt.tight_layout()
plt.ylabel('Loss'); plt.xlabel('epochs')
#plt.savefig('../results/HenonHeiles_loss.png')
plt.savefig('HenonHeiles_loss.png')
# TEST THE PREDICTED SOLUTIONS
nTest = n_train; tTest = torch.linspace(t0,t_max,nTest)
tTest = tTest.reshape(-1,1);
tTest.requires_grad=True
t_net = tTest.detach().numpy()
x,y,px,py =parametricSolutions(tTest,model,X0)
x=x.data.numpy(); y=y.data.numpy()
px=px.data.numpy(); py=py.data.numpy()
E = energy(x, y, px, py, lam)
###################
# Symplectic Euler
####################
def symEuler(Ns, x0, y0, px0, py0, t0, t_max, lam):
t_s = np.linspace(t0, t_max, Ns+1)
dts = t_max/Ns
x_s = np.zeros(Ns+1); px_s = np.zeros(Ns+1);
y_s = np.zeros(Ns+1); py_s = np.zeros(Ns+1)
x_s[0], px_s[0], y_s[0], py_s[0] = x0, px0,y0, py0
for n in range(Ns):
x_s[n+1] = x_s[n] + dts*px_s[n]
y_s[n+1] = y_s[n] + dts*py_s[n]
px_s[n+1] = px_s[n] - dts*(x_s[n+1] + 2*lam*x_s[n+1]*y_s[n+1])
py_s[n+1] = py_s[n] - dts*(y_s[n+1] + lam*(x_s[n+1]**2-y_s[n+1]**2))
E_euler = energy( x_s, y_s, px_s, py_s, lam)
return E_euler, x_s,y_s, px_s, py_s, t_s
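# Semi-implicit (symplectic) Euler: positions are advanced with the current momenta,
# and the momenta are then advanced using the *updated* positions. Unlike the explicit
# Euler method, this preserves the symplectic structure, so the energy error stays bounded.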
Ns = n_train
E_s, x_s, y_s, px_s, py_s, t_s = symEuler(Ns, x0, y0, px0, py0, t0, t_max, lam)
Ns100 = 100*n_train
E_s100, x_s100, y_s100, px_s100, py_s100, t_s100 = symEuler(Ns100, x0, y0, px0, py0, t0, t_max, lam)
################
# Make the plots
#################
# Figure for trajectories: x(t), p(t), energy in time E(t),
# and phase space trajectory p(x)
lineW = 2 # Line thickness
plt.figure(figsize=(10,8))
plt.subplot(2,2,1)
plt.plot(t_num,x_num,'-g',linewidth=lineW, label='Ground truth');
plt.plot(t_net, x,'--b', label='Neural Net');
plt.plot(t_s,x_s,':k',linewidth=lineW, label='Symplectic Euler');
plt.plot(t_s100,x_s100,'-.r',linewidth=lineW, label='Symplectic Euler x 100 points');
plt.ylabel('x');plt.xlabel('t')
plt.legend()
plt.subplot(2,2,2)
plt.plot(t_num,E_ex,'-g',linewidth=lineW);
plt.plot(t_net, E,'--b')
plt.plot(t_s,E_s,':k',linewidth=lineW);
plt.plot(t_s100,E_s100,'-.r',linewidth=lineW);
plt.ylabel('E');plt.xlabel('t')
plt.ylim([1.1*E0,0.9*E0])
plt.subplot(2,2,3)
plt.plot(t_num,px_num,'-g',linewidth=lineW);
plt.plot(t_net, px,'--b')
plt.plot(t_s,px_s,':k',linewidth=lineW);
plt.plot(t_s100,px_s100,'-.r',linewidth=lineW);
plt.ylabel('px');plt.xlabel('t')
plt.subplot(2,2,4)
plt.plot(x_num,px_num,'-g',linewidth=lineW);
plt.plot(x, px,'--b')
plt.plot(x_s,px_s,'--k',linewidth=lineW);
plt.plot(x_s100,px_s100,'-.r',linewidth=lineW);
plt.ylabel('px');plt.xlabel('x');
#plt.savefig('../results/HenonHeiles_trajectories.png')
plt.savefig('HenonHeiles_trajectories.png')
from .actor_critic import Actor, Critic
# addresses/IANA.py
#!/usr/bin/env python
class Networks(object):
def __init__(self, url=None):
self.networks = {
'0.0.0.0/8': 'IANA - Local Identification',
'10.0.0.0/8': 'IANA - Private Use',
            '100.64.0.0/10': 'IANA - Shared Address Space',
'127.0.0.0/8': 'IANA - Loopback',
'169.254.0.0/16': 'Dynamic Configuration of IPv4 Link-Local Addresses',
'172.16.0.0/12': 'IANA - Private Use',
'192.0.0.0/24': 'IANA - IPv4 Special Purpose Address Registry',
'192.0.2.0/24': 'IANA - Documentation',
'192.168.3.11/24': 'IANA - Reserved',
'192.168.127.12/24': 'IANA - Reserved',
'192.168.0.0/16': 'IANA - Private Use',
'172.16.17.32/24': 'IANA - Reserved',
'198.18.0.0/15': 'IANA - Benchmarking Methodology for Network Interconnect Devices',
'198.51.100.0/24': 'IANA - Documentation',
'203.0.113.0/24': 'IANA - Documentation',
            '224.0.0.0/4': 'Multicast',
'240.0.0.0/4': 'Reserved for future use'
}
def get(self):
return self.networks
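# Illustrative usage (hypothetical):
#   nets = Networks()
#   print(nets.get().get('10.0.0.0/8'))  # -> 'IANA - Private Use'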
# tests/integration/misc/test_generation.py
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
app = """
library(dash)
library(dashGeneratorTestComponentNested)
library(dashGeneratorTestComponentStandard)
app <- Dash$new()
app$layout(html$div(list(
dgtc_standardMyStandardComponent(id="standard", value="Standard"),
dgtc_nestedMyNestedComponent(id="nested", value="Nested")
)))
app$run_server()
"""
styled_app = """
library(dash)
library(dashGeneratorTestComponentStandard)
app <- Dash$new()
app$layout(html$div(list(
html$button(id='btn', list('Click')),
html$div(id='container')
)))
app$callback(output(id = 'container', property = 'children'),
list(input(id = 'btn', property = 'n_clicks')),
function(n_clicks) {
if (is.null(unlist(n_clicks))) {
return(dashNoUpdate())
} else {
return(list(dgtc_standardMyStandardComponent(id="standard", value="Standard", style=list(fontFamily="godfather"))))
}
})
app$run_server()
"""
def test_gene001_simple_callback(dashr):
dashr.start_server(app)
assert dashr.wait_for_element("#standard").text == "Standard"
assert dashr.wait_for_element("#nested").text == "Nested"
dashr.percy_snapshot("gene001-simple-callback")
def test_gene002_arbitrary_resources(dashr):
dashr.start_server(styled_app)
assert (
dashr.driver.execute_script("return document.fonts.check('1em godfather')")
is False
)
dashr.wait_for_element("#btn").click()
assert dashr.wait_for_element("#standard").text == "Standard"
WebDriverWait(dashr.driver, 10).until(
lambda _: dashr.driver.execute_script("return document.fonts.check('1em godfather')") is True,
)
dashr.percy_snapshot("gene002-arbitrary-resource")
from __future__ import division, print_function, absolute_import
import tensorflow as tf
from tf_layers import *
from tf_selu import selu
import math
def getNetwork(t):
print("using AE", t)
if t == 'wide':
return Regression_Wide
if t == 'encoded':
return Regression_Encoded
if t == 'chem':
return AutoEncoder_Chem
if t == 'sig':
return AutoEncoder_Chem_Sigmoid
if t == 'ecfp':
return AutoEncoder_Chem_ECFP
if t == 'ecfp_sig':
return AutoEncoder_Chem_ECFP_sig
if t == 'ecfp_sig_bn':
return AutoEncoder_Chem_ECFP_sig_bn
if t == 'flat':
return AutoEncoder_Chem_Flat
if t == 'ecfp_two':
return AutoEncoder_ECFP_Two
if t == 'ecfp_three':
return AutoEncoder_ECFP_Three
if t == 'ecfp_five':
return AutoEncoder_ECFP_Five
if t == 'ecfp_three_bn':
return AutoEncoder_ECFP_Three_BN
if t == 'ecfp_skinny_bn':
return AutoEncoder_ECFP_Skinny_BN
if t == 'ecfp_selu':
return AutoEncoder_ECFP_SELU
if t == 'ecfp_selu_five':
return AutoEncoder_ECFP_SELU_Five
if t == 'ecfp_selu_two':
return AutoEncoder_ECFP_SELU_Two
if t == 'rnaseq_selu':
return RNASEQ_SELU
if t == 'rnaseq_selu_big':
return RNASEQ_SELU_big
if t == 'rnaseq_relu_big':
return RNASEQ_RELU_big
if t == 'rnaseq_sig_big':
return RNASEQ_Sig_big
if t == 'rnaseq_selu_bigger':
return RNASEQ_SELU_bigger
if t == 'rnaseq_selu_sq':
return RNASEQ_SELU_sq
if t == 'rnaseq_selu_1k':
return RNASEQ_SELU_1k
if t == 'rnaseq_sq':
return RNASEQ_sq
if t == 'lbexp_selu':
return LBEXP_SELU
if t == 'lbexp_relu':
return LBEXP_RELU
if t == 'tox_relu':
return TOX_RELU
if t == 'tox_relu_reg':
return TOX_RELU_REG
if t == 'fang_relu_dragon':
return FANG_RELU_DRAGON
if t == 'fang_relu_gene':
return FANG_RELU_GENE
print("unrecognized autoencoder type", t)
class Regression_Wide:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
fc1 = batch_normalized_linear_layer(x, "fc1", 2000, stddev=1, wd=.004, test=test)
fc2 = batch_normalized_linear_layer(fc1, "fc2", 500, stddev=1, wd=.004, test=test)
fc3 = batch_normalized_linear_layer(fc2, "fc3", 100, stddev=1, wd=.004, test=test)
fc3_out = linear_layer(fc3, 'fc3_out', 1, stddev=1, wd=.004, nonlinearity=None)
return fc3_out
class Regression_Encoded:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
#fc1 = batch_normalized_linear_layer(x, "fc1", 50, stddev=1, wd=.004, test=test)
#fc2 = batch_normalized_linear_layer(x, "fc2", 25, stddev=1, wd=.004, test=test)
#fc3 = batch_normalized_linear_layer(fc2, "fc3", 12, stddev=1, wd=.004, test=test)
fc3_out = linear_layer(x, 'fc3_out', 1, stddev=1, wd=.004, nonlinearity=None)
return fc3_out
class AutoEncoder_Chem_Sigmoid:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("using the correct AE class")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
fc1 = linear_layer(x, "fc1", 2000, stddev=.005, wd=.004, nonlinearity=tf.sigmoid)
fc2 = linear_layer(fc1, "fc2", 500, stddev=.005, wd=.004, nonlinearity=tf.sigmoid)
fc3_out = linear_layer(fc2, "fc3_3", 100, stddev=.005, wd=.004, nonlinearity=tf.sigmoid)
return fc3_out
# Building the decoder
def decoder(self, x):
test = not self.is_training
de_fc1 = linear_layer(x, "de_fc1", 500, stddev=.005, wd=.004, nonlinearity=tf.sigmoid)
de_fc2 = linear_layer(de_fc1, "de_fc2", 2000, stddev=.005, wd=.004, nonlinearity=tf.sigmoid)
de_fc3_out = linear_layer(de_fc2, 'de_fc3_out', self.width, stddev=.005, wd=.004)
return de_fc3_out
class AutoEncoder_Chem:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
fc1 = batch_normalized_linear_layer(x, "fc1", 2000, stddev=.005, wd=.004, test=test)
fc2 = batch_normalized_linear_layer(fc1, "fc2", 500, stddev=.005, wd=.004, test=test)
fc3_out = linear_layer(fc2, "fc3_3", 100, stddev=.005, wd=.004)
return fc3_out
# Building the decoder
def decoder(self, x):
test = not self.is_training
de_fc1 = batch_normalized_linear_layer(x, "de_fc1", 500, stddev=.005, wd=.004, test=test)
de_fc2 = batch_normalized_linear_layer(de_fc1, "de_fc2", 2000, stddev=.005, wd=.004, test=test)
de_fc3_out = linear_layer(de_fc2, 'de_fc3_out', self.width, stddev=.005, wd=.004)
return de_fc3_out
class AutoEncoder_Chem_Flat:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("using flat network")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
fc1 = linear_layer(x, "fc1", 100, stddev=.005, wd=.004)
return fc1
# Building the decoder
def decoder(self, x):
test = not self.is_training
de_fc1 = linear_layer(x, 'de_fc1', self.width, stddev=.005, wd=.004)
return de_fc1
class AutoEncoder_Chem_ECFP:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
fc1 = linear_layer(x, "fc1", 2000, stddev=.01, wd=.004)
fc2 = linear_layer(fc1, "fc2", 1000, stddev=.01, wd=.004)
fc3 = linear_layer(fc2, "fc3", 500, stddev=.01, wd=.004)
fc4 = linear_layer(fc3, "fc4", 250, stddev=.01, wd=.004)
fc5_out = linear_layer(fc4, "fc5_out", 100, stddev=.01, wd=.004)
return fc5_out
# Building the decoder
def decoder(self, x):
test = not self.is_training
de_fc1 = linear_layer(x, "de_fc1", 250, stddev=.01, wd=.004)
de_fc2 = linear_layer(de_fc1, "de_fc2", 500, stddev=.01, wd=.004)
de_fc3 = linear_layer(de_fc2, "de_fc3", 1000, stddev=.01, wd=.004)
de_fc4 = linear_layer(de_fc3, "de_fc4", 2000, stddev=.01, wd=.004)
de_fc5_out = linear_layer(de_fc4, 'de_fc5_out', self.width, stddev=.01, wd=.004)
return de_fc5_out
def intense_sigmoid(X, name=None):
return tf.nn.sigmoid(1*X)
class AutoEncoder_Chem_ECFP_sig:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_sig")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
fc1 = linear_layer(x, "fc1", 2000, stddev=.01, wd=.004)
fc2 = linear_layer(fc1, "fc2", 1000, stddev=.01, wd=.004)
fc3 = linear_layer(fc2, "fc3", 500, stddev=.01, wd=.004)
fc4 = linear_layer(fc3, "fc4", 250, stddev=.01, wd=.004)
fc5_out = linear_layer(fc4, "fc5_out", 100, stddev=.01, wd=.004)
return fc5_out
# Building the decoder
def decoder(self, x):
test = not self.is_training
de_fc1 = linear_layer(x, "de_fc1", 250, stddev=.01, wd=.004)
de_fc2 = linear_layer(de_fc1, "de_fc2", 500, stddev=.01, wd=.004)
de_fc3 = linear_layer(de_fc2, "de_fc3", 1000, stddev=.01, wd=.004)
de_fc4 = linear_layer(de_fc3, "de_fc4", 2000, stddev=.01, wd=.004)
de_fc5_out = linear_layer(de_fc4, 'de_fc5_out', self.width, stddev=.01, wd=.004, nonlinearity=intense_sigmoid)
return de_fc5_out
class AutoEncoder_Chem_ECFP_sig_bn:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_sig_bn")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x):
test = not self.is_training
fc1 = batch_normalized_linear_layer(x, "fc1", 2000, stddev=.01, wd=.004, test=test)
fc2 = batch_normalized_linear_layer(fc1, "fc2", 1000, stddev=.01, wd=.004, test=test)
fc3 = batch_normalized_linear_layer(fc2, "fc3", 500, stddev=.01, wd=.004, test=test)
fc4 = batch_normalized_linear_layer(fc3, "fc4", 250, stddev=.01, wd=.004, test=test)
fc5_out = linear_layer(fc4, "fc5_out", 100, stddev=.01, wd=.004)
return fc5_out
# Building the decoder
def decoder(self, x):
test = not self.is_training
de_fc1 = batch_normalized_linear_layer(x, "de_fc1", 250, stddev=.01, wd=.004, test=test)
de_fc2 = batch_normalized_linear_layer(de_fc1, "de_fc2", 500, stddev=.01, wd=.004, test=test)
de_fc3 = batch_normalized_linear_layer(de_fc2, "de_fc3", 1000, stddev=.01, wd=.004, test=test)
de_fc4 = batch_normalized_linear_layer(de_fc3, "de_fc4", 2000, stddev=.01, wd=.004, test=test)
de_fc5_out = linear_layer(de_fc4, 'de_fc5_out', self.width, stddev=.01, wd=.004, nonlinearity=intense_sigmoid)
return de_fc5_out
class AutoEncoder_ECFP_Two:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_two")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay):
test = not self.is_training
fc1 = linear_layer(x, "fc1", 100, stddev=.04, wd=None)
fc2_out = linear_layer(fc1, "fc2_out", 100, stddev=.04, wd=None)
return fc2_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = linear_layer(x, "de_fc1", 100, stddev=.04, wd=None)
de_fc2_out = linear_layer(de_fc1, 'de_fc2_out', self.width, stddev=.04, \
wd=None, nonlinearity=tf.nn.relu)
return de_fc2_out
class AutoEncoder_ECFP_Three:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_three")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay):
test = not self.is_training
fc1 = linear_layer(x, "fc1", 500, stddev=.04, wd=None)
fc2 = linear_layer(fc1, 'fc2', 250, stddev=.04, wd=None)
fc3_out = linear_layer(fc2, "fc3_out", 100, stddev=.04, wd=None)
return fc3_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = linear_layer(x, "de_fc1", 250, stddev=.04, wd=None)
de_fc2 = linear_layer(de_fc1, "de_fc2", 500, stddev=.04, wd=None)
de_fc3_out = linear_layer(de_fc2, 'de_fc3_out', self.width, stddev=.04, \
wd=None, nonlinearity=tf.sigmoid)
return de_fc3_out
class AutoEncoder_ECFP_Five:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_three")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay):
test = not self.is_training
fc1 = linear_layer(x, "fc1", 200, stddev=.04, wd=None)
fc2 = linear_layer(fc1, 'fc2', 150, stddev=.04, wd=None)
fc3 = linear_layer(fc2, "fc3", 100, stddev=.04, wd=None)
fc4 = linear_layer(fc3, "fc4", 100, stddev=.04, wd=None)
fc5_out = linear_layer(fc4, "fc5_out", 100, stddev=.04, wd=None)
return fc5_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = linear_layer(x, "de_fc1", 100, stddev=.04, wd=None)
de_fc2 = linear_layer(de_fc1, "de_fc2", 100, stddev=.04, wd=None)
de_fc3 = linear_layer(de_fc2, "de_fc3", 150, stddev=.04, wd=None)
de_fc4 = linear_layer(de_fc3, "de_fc4", 200, stddev=.04, wd=None)
de_fc5_out = linear_layer(de_fc4, 'de_fc5_out', self.width, stddev=.04, \
wd=None, nonlinearity=tf.sigmoid)
return de_fc5_out
class AutoEncoder_ECFP_SELU_Two:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_two_SELU")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 100
fc1 = linear_layer(x, 'fc1', f1_width, stddev=math.sqrt(1./self.width), wd=weight_decay, \
nonlinearity=selu)
self.f2_out_width = 100
fc2_out = linear_layer(fc1, "fc2_out", self.f2_out_width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=selu)
return fc2_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 100
de_fc1 = linear_layer(x, "de_fc1", f1_width, stddev=math.sqrt(1./self.f2_out_width), \
wd=weight_decay, nonlinearity=selu)
de_fc2_out = linear_layer(de_fc1, 'de_fc2_out', self.width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=tf.nn.relu)
return de_fc2_out
class AutoEncoder_ECFP_SELU_Five:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_SELU_Five")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
f1_width = 200
fc1 = linear_layer(x, "ecfps5_fc1", f1_width, stddev=math.sqrt(1./self.width), wd=weight_decay, \
nonlinearity=selu)
f2_width = 150
fc2 = linear_layer(fc1, 'ecfps5_fc2', f2_width, stddev=math.sqrt(1./f1_width), wd=weight_decay, \
nonlinearity=selu)
f3_width = 100
fc3 = linear_layer(fc2, 'ecfps5_fc3', f3_width, stddev=math.sqrt(1./f2_width), wd=weight_decay, \
nonlinearity=selu)
f4_width = 100
fc4 = linear_layer(fc3, 'ecfps5_fc4', f4_width, stddev=math.sqrt(1./f3_width), wd=weight_decay, \
nonlinearity=selu)
self.f5_out_width = 100
fc5_out = linear_layer(fc4, "ecfps5_fc5_out", self.f5_out_width, stddev=math.sqrt(1./f4_width), \
wd=weight_decay, nonlinearity=selu)
return fc5_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 100
de_fc1 = linear_layer(x, "ecfps5_de_fc1", f1_width, stddev=math.sqrt(1./self.f5_out_width), \
wd=weight_decay, nonlinearity=selu)
f2_width = 100
de_fc2 = linear_layer(de_fc1, "ecfps5_de_fc2", f2_width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=selu)
f3_width = 150
de_fc3 = linear_layer(de_fc2, "ecfps5_de_fc3", f3_width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=selu)
f4_width = 200
de_fc4 = linear_layer(de_fc3, "ecfps5_de_fc4", f4_width, stddev=math.sqrt(1./f3_width), \
wd=weight_decay, nonlinearity=selu)
de_fc5_out = linear_layer(de_fc4, 'ecfps5_de_fc5_out', self.width, stddev=math.sqrt(1./f4_width), \
wd=weight_decay, nonlinearity=tf.nn.relu)
return de_fc5_out
class AutoEncoder_ECFP_SELU:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_three_SELU")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 500
fc1 = linear_layer(x, "ecfps_fc1", f1_width, stddev=math.sqrt(1./self.width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f2_width = 250
fc2 = linear_layer(fc1, 'ecfps_fc2', f2_width, stddev=math.sqrt(1./f1_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
self.f3_out_width = 100
fc3_out = linear_layer(fc2, "ecfps_fc3_out", self.f3_out_width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=selu, reuse=reuse)
return fc3_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 250
de_fc1 = linear_layer(x, "ecfps_de_fc1", f1_width, stddev=math.sqrt(1./self.f3_out_width), \
wd=weight_decay, nonlinearity=selu)
f2_width = 500
de_fc2 = linear_layer(de_fc1, "ecfps_de_fc2", f2_width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=selu)
de_fc3_out = linear_layer(de_fc2, 'ecfps_de_fc3_out', self.width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=tf.nn.relu)
print("out layer has width", self.width)
return de_fc3_out
class RNASEQ_SELU_big:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_SELU_big")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 5000
fc1 = linear_layer(x, "rnasb_fc1", f1_width, stddev=math.sqrt(1./self.width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f2_width = 2000
fc2 = linear_layer(fc1, 'rnasb_fc2', f2_width, stddev=math.sqrt(1./f1_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f3_width = 400
fc3 = linear_layer(fc2, 'rnasb_fc3', f3_width, stddev=math.sqrt(1./f2_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
self.f4_out_width = 200
fc4_out = linear_layer(fc3, "rnasb_fc4_out", self.f4_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc4_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 400
de_fc1 = linear_layer(x, "rnasb_de_fc1", f1_width, stddev=math.sqrt(1./self.f4_out_width), \
wd=weight_decay, nonlinearity=selu)
f2_width = 2000
de_fc2 = linear_layer(de_fc1, "rnasb_de_fc2", f2_width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=selu)
f3_width = 5000
de_fc3 = linear_layer(de_fc2, "rnasb_de_fc3", f3_width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=selu)
de_fc4_out = linear_layer(de_fc3, 'rnasb_de_fc4_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc4_out
class RNASEQ_RELU_big:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_RELU_big")
self.is_training = is_training
self.pfx = "rnarb"
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 5000
fc1 = linear_layer(x, self.pfx+"_fc1", f1_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
f2_width = 2000
fc2 = linear_layer(fc1, self.pfx+'_fc2', f2_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
f3_width = 400
        fc3 = linear_layer(fc2, self.pfx+'_fc3', f3_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
self.f4_out_width = 200
fc4_out = linear_layer(fc3, "rnasb_fc4_out", self.f4_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc4_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 400
de_fc1 = linear_layer(x, self.pfx+"_de_fc1", f1_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu)
f2_width = 2000
de_fc2 = linear_layer(de_fc1, self.pfx+"_de_fc2", f2_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu)
f3_width = 5000
de_fc3 = linear_layer(de_fc2, self.pfx+"_de_fc3", f3_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu)
        de_fc4_out = linear_layer(de_fc3, self.pfx+'_de_fc4_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc4_out
class RNASEQ_Sig_big:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_Sig_big")
self.is_training = is_training
self.pfx = "rnasigb"
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 5000
fc1 = linear_layer(x, self.pfx+"_fc1", f1_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.sigmoid, reuse=reuse)
f2_width = 2000
fc2 = linear_layer(fc1, self.pfx+'_fc2', f2_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.sigmoid, reuse=reuse)
f3_width = 400
        fc3 = linear_layer(fc2, self.pfx+'_fc3', f3_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.sigmoid, reuse=reuse)
self.f4_out_width = 200
fc4_out = linear_layer(fc3, "rnasb_fc4_out", self.f4_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc4_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 400
de_fc1 = linear_layer(x, self.pfx+"_de_fc1", f1_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.sigmoid)
f2_width = 2000
de_fc2 = linear_layer(de_fc1, self.pfx+"_de_fc2", f2_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.sigmoid)
f3_width = 5000
de_fc3 = linear_layer(de_fc2, self.pfx+"_de_fc3", f3_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.sigmoid)
        de_fc4_out = linear_layer(de_fc3, self.pfx+'_de_fc4_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc4_out
class RNASEQ_SELU_bigger:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_SELU_bigger")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
fc1 = linear_layer(x, "rnasbr_fc1", 5000, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
fc2 = linear_layer(fc1, 'rnasbr_fc2', 2000, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
fc3 = linear_layer(fc2, 'rnasbr_fc3', 1000, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
fc4 = linear_layer(fc3, 'rnasbr_fc4', 500, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
self.f5_out_width = 200
fc5_out = linear_layer(fc4, "rnasbr_fc5_out", self.f5_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc5_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = linear_layer(x, "rnasbr_de_fc1", 500, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc2 = linear_layer(de_fc1, "rnasbr_de_fc2", 1000, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc3 = linear_layer(de_fc2, "rnasbr_de_fc3", 2000, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc4 = linear_layer(de_fc3, "rnasbr_de_fc4", 5000, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc5_out = linear_layer(de_fc4, 'rnasbr_de_fc5_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc5_out
class RNASEQ_SELU_sq:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_SELU_sq")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 1000
fc1 = linear_layer(x, "rnassq_fc1", f1_width, stddev=math.sqrt(1./self.width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f2_width = 1000
fc2 = linear_layer(fc1, 'rnassq_fc2', f2_width, stddev=math.sqrt(1./f1_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f3_width = 1000
fc3 = linear_layer(fc2, 'rnassq_fc3', f3_width, stddev=math.sqrt(1./f2_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
self.f4_out_width = 200
fc4_out = linear_layer(fc3, "rnassq_fc4_out", self.f4_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc4_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 1000
de_fc1 = linear_layer(x, "rnassq_de_fc1", f1_width, stddev=math.sqrt(1./self.f4_out_width), \
wd=weight_decay, nonlinearity=selu)
f2_width = 1000
de_fc2 = linear_layer(de_fc1, "rnassq_de_fc2", f2_width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=selu)
f3_width = 1000
de_fc3 = linear_layer(de_fc2, "rnassq_de_fc3", f3_width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=selu)
de_fc4_out = linear_layer(de_fc3, 'rnassq_de_fc4_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc4_out
class RNASEQ_SELU_1k:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_SELU_sq")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 1000
fc1 = linear_layer(x, "rnas1k_fc1", f1_width, stddev=math.sqrt(1./self.width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f2_width = 1000
fc2 = linear_layer(fc1, 'rnas1k_fc2', f2_width, stddev=math.sqrt(1./f1_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f3_width = 1000
fc3 = linear_layer(fc2, 'rnas1k_fc3', f3_width, stddev=math.sqrt(1./f2_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
self.f4_out_width = 1000
fc4_out = linear_layer(fc3, "rnas1k_fc4_out", self.f4_out_width, stddev=math.sqrt(1./f3_width), \
wd=weight_decay, nonlinearity=selu, reuse=reuse)
return fc4_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 1000
de_fc1 = linear_layer(x, "rnas1k_de_fc1", f1_width, stddev=math.sqrt(1./self.f4_out_width), \
wd=weight_decay, nonlinearity=selu)
f2_width = 1000
de_fc2 = linear_layer(de_fc1, "rnas1k_de_fc2", f2_width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=selu)
f3_width = 1000
de_fc3 = linear_layer(de_fc2, "rnas1k_de_fc3", f3_width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=selu)
de_fc4_out = linear_layer(de_fc3, 'rnas1k_de_fc4_out', self.width, stddev=math.sqrt(1./f3_width), \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc4_out
class RNASEQ_SELU:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_SELU")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 5000
fc1 = linear_layer(x, "rnas_fc1", f1_width, stddev=math.sqrt(1./self.width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
f2_width = 1000
fc2 = linear_layer(fc1, 'rnas_fc2', f2_width, stddev=math.sqrt(1./f1_width), wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
self.f3_out_width = 100
fc3_out = linear_layer(fc2, "rnas_fc3_out", self.f3_out_width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=selu, reuse=reuse)
return fc3_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 1000
de_fc1 = linear_layer(x, "rnas_de_fc1", f1_width, stddev=math.sqrt(1./self.f3_out_width), \
wd=weight_decay, nonlinearity=selu)
f2_width = 5000
de_fc2 = linear_layer(de_fc1, "rnas_e_fc2", f2_width, stddev=math.sqrt(1./f1_width), \
wd=weight_decay, nonlinearity=selu)
de_fc3_out = linear_layer(de_fc2, 'rnas_e_fc3_out', self.width, stddev=math.sqrt(1./f2_width), \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc3_out
class RNASEQ_sq:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_sq")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
f1_width = 1000
fc1 = linear_layer(x, "rnasq_fc1", f1_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
f2_width = 1000
fc2 = linear_layer(fc1, 'rnasq_fc2', f2_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
f3_width = 1000
fc3 = linear_layer(fc2, 'rnasq_fc3', f3_width, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
self.f4_out_width = 200
fc4_out = linear_layer(fc3, "rnasq_fc4_out", self.f4_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu, reuse=reuse)
return fc4_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
f1_width = 1000
de_fc1 = linear_layer(x, "rnasq_de_fc1", f1_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu)
f2_width = 1000
de_fc2 = linear_layer(de_fc1, "rnasq_de_fc2", f2_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu)
f3_width = 1000
de_fc3 = linear_layer(de_fc2, "rnasq_de_fc3", f3_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu)
de_fc4_out = linear_layer(de_fc3, 'rnasq_de_fc4_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc4_out
class LBEXP_SELU:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for LBEXP_selu")
self.is_training = is_training
self.code = 'lbexpselu'
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
fc1 = linear_layer(tf.nn.dropout(x, keep_prob), self.code+"_fc1", 200, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
fc2 = linear_layer(tf.nn.dropout(fc1, keep_prob), self.code+'_fc2', 150, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
self.f3_out_width = 100
fc3_out = linear_layer(tf.nn.dropout(fc2, keep_prob), self.code+"_fc3_out", self.f3_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc3_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = linear_layer(x, self.code+"_de_fc1", 150, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc2 = linear_layer(de_fc1, self.code+"_de_fc2", 200, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc3_out = linear_layer(de_fc2, self.code+'_de_fc3_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc3_out
class LBEXP_RELU:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for LBEXP_relu")
self.is_training = is_training
self.code = 'lbexprelu'
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
fc1 = linear_layer(tf.nn.dropout(x, keep_prob), self.code+"_fc1", 200, stddev='Xav', wd=weight_decay, \
nonlinearity=None, reuse=reuse)
fc2 = linear_layer(tf.nn.dropout(fc1, keep_prob), self.code+'_fc2', 150, stddev='Xav', wd=weight_decay, \
nonlinearity=None, reuse=reuse)
self.f3_out_width = 100
fc3_out = linear_layer(tf.nn.dropout(fc2, keep_prob), self.code+"_fc3_out", self.f3_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc3_out
class AutoEncoder_ECFP_Three_BN:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_three_BN")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay):
test = not self.is_training
fc1 = batch_normalized_linear_layer(x, "fc1", 500, stddev=.01, wd=weight_decay, test=test)
fc2 = batch_normalized_linear_layer(fc1, 'fc2', 250, stddev=.01, wd=weight_decay, test=test)
fc3_out = linear_layer(fc2, "fc3_out", 100, stddev=.01, wd=weight_decay)
return fc3_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = batch_normalized_linear_layer(x, "de_fc1", 250, stddev=.01, wd=weight_decay, test=test)
de_fc2 = batch_normalized_linear_layer(de_fc1, "de_fc2", 500, stddev=.01, wd=weight_decay, test=test)
de_fc3_out = linear_layer(de_fc2, 'de_fc3_out', self.width, stddev=.01, \
wd=weight_decay, nonlinearity=intense_sigmoid)
return de_fc3_out
class AutoEncoder_ECFP_Skinny_BN:
def __init__(self, numFeatures, is_training=True):
# self.width is how many features make up the input/output
print("AE for ECFP_three_skinny_BN")
self.width = numFeatures
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay):
test = not self.is_training
fc1 = batch_normalized_linear_layer(x, "fc1", 100, stddev=.01, wd=weight_decay, test=test)
#fc2 = batch_normalized_linear_layer(fc1, 'fc2', 100, stddev=.01, wd=weight_decay, test=test)
fc3_out = linear_layer(fc1, "fc3_out", 100, stddev=.01, wd=weight_decay)
return fc3_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = batch_normalized_linear_layer(x, "de_fc1", 100, stddev=.01, wd=weight_decay, test=test)
#de_fc2 = batch_normalized_linear_layer(de_fc1, "de_fc2", 100, stddev=.01, wd=weight_decay, test=test)
de_fc3_out = linear_layer(de_fc1, 'de_fc3_out', self.width, stddev=.01, \
wd=weight_decay, nonlinearity=intense_sigmoid)
return de_fc3_out
class FANG_RELU_DRAGON:
def __init__(self):
print("classifier for fang_relu_dragon")
self.code = 'fang_relu_dragon'
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
fc1 = linear_layer(tf.nn.dropout(x, keep_prob), self.code+"_fc1", 1000, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
fc2 = linear_layer(tf.nn.dropout(fc1, keep_prob), self.code+'_fc2', 1000, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
fc3 = linear_layer(tf.nn.dropout(fc2, keep_prob), self.code+'_fc3', 1000, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
return fc3
class FANG_RELU_GENE:
def __init__(self):
print("classifier for fang_relu_gene")
self.code = 'fang_relu_gene'
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
fc1 = linear_layer(tf.nn.dropout(x, keep_prob), self.code+"_fc1", 1000, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
fc2 = linear_layer(tf.nn.dropout(fc1, keep_prob), self.code+'_fc2', 1000, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
fc3 = linear_layer(tf.nn.dropout(fc2, keep_prob), self.code+'_fc3', 1000, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
return fc3
class TOX_RELU:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("classifier for tox_relu")
self.is_training = is_training
self.code = 'toxrelu'
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
fc1 = linear_layer(tf.nn.dropout(x, keep_prob), self.code+"_fc1", 50, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
#fc2 = linear_layer(tf.nn.dropout(fc1, keep_prob), self.code+'_fc2', 50, stddev='Xav', wd=weight_decay, \
# nonlinearity=tf.nn.relu, reuse=reuse)
#fc3 = linear_layer(tf.nn.dropout(fc2, keep_prob), self.code+'_fc3', 25, stddev='Xav', wd=weight_decay, \
# nonlinearity=tf.nn.relu, reuse=reuse)
self.f4_out_width = 10
fc4_out = linear_layer(tf.nn.dropout(fc1, keep_prob), self.code+"_fc4_out", self.f4_out_width, stddev='Xav', \
wd=weight_decay, nonlinearity=tf.nn.relu, reuse=reuse)
return fc4_out
class TOX_RELU_REG:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("classifier for tox_relu_reg")
self.is_training = is_training
self.code = 'toxrelu_reg'
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
#fc1 = linear_layer(tf.nn.dropout(x, keep_prob), self.code+"_fc1", 50, stddev='Xav', wd=weight_decay, \
# nonlinearity=tf.nn.relu, reuse=reuse)
#fc2 = linear_layer(tf.nn.dropout(fc1, keep_prob), self.code+'_fc2', 50, stddev='Xav', wd=weight_decay, \
# nonlinearity=tf.nn.relu, reuse=reuse)
#fc3 = linear_layer(tf.nn.dropout(fc2, keep_prob), self.code+'_fc3', 25, stddev='Xav', wd=weight_decay, \
# nonlinearity=tf.nn.relu, reuse=reuse)
#fc4 = linear_layer(tf.nn.dropout(fc3, keep_prob), self.code+"_fc4", 20, stddev='Xav', \
# wd=weight_decay, nonlinearity=tf.nn.relu, reuse=reuse)
regressionLayer = linear_layer(x, self.code+"_reg_layer", 1, stddev='Xav', wd=weight_decay, \
nonlinearity=tf.nn.relu, reuse=reuse)
return regressionLayer
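# Note: the class below redefines RNASEQ_SELU_big and therefore shadows the earlier
# definition of the same name above; the later (stddev='selu') variant is the one in effect.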
class RNASEQ_SELU_big:
def __init__(self, is_training=True):
# self.width is how many features make up the input/output
print("AE for RNASEQ_SELU_big")
self.is_training = is_training
# Building the encoder
def encoder(self, x, keep_prob, weight_decay, reuse=None):
test = not self.is_training
self.width = x.get_shape().as_list()[1]
print("x shape", x.get_shape().as_list())
print("self.width", self.width)
fc1 = linear_layer(x, "rnasb_fc1", 5000, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
fc2 = linear_layer(fc1, 'rnasb_fc2', 2000, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
fc3 = linear_layer(fc2, 'rnasb_fc3', 400, stddev='selu', wd=weight_decay, \
nonlinearity=selu, reuse=reuse)
fc4_out = linear_layer(fc3, "rnasb_fc4_out", 200, stddev='Xav', \
wd=weight_decay, nonlinearity=None, reuse=reuse)
return fc4_out
# Building the decoder
def decoder(self, x, keep_prob, weight_decay):
test = not self.is_training
de_fc1 = linear_layer(x, "rnasb_de_fc1", 400, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc2 = linear_layer(de_fc1, "rnasb_de_fc2", 2000, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc3 = linear_layer(de_fc2, "rnasb_de_fc3", 5000, stddev='selu', \
wd=weight_decay, nonlinearity=selu)
de_fc4_out = linear_layer(de_fc3, 'rnasb_de_fc4_out', self.width, stddev='Xav', \
wd=weight_decay, nonlinearity=None)
print("out layer has width", self.width)
return de_fc4_out
# ModelDBRepository/137676
#encoding: utf-8
"""
images.py -- Toolbox functions for creating and handling image output
Exported namespace: image_blast, array_to_rgba, array_to_image
Written by <NAME>
Center for Theoretical Neuroscience
Copyright (c) 2007-2008 Columbia Unversity. All Rights Reserved.
This software is provided AS IS under the terms of the Open Source MIT License.
See http://www.opensource.org/licenses/mit-license.php.
"""
import os
import numpy as np
def image_blast(M, savedir, stem='image', fmt='%s_%03d', rev=False, **kwargs):
"""Save a rank-3 stacked intensity matrix *M* to a set of individual PNG
image files in the directory *savedir*.
If *savedir* does not exist it will be created. Set **stem** to specify
the filename suffix.
Keyword arguments:
stem -- file name stem to be used for output images
fmt -- a unique_path fmt specification (need an %s followed by a %d)
rev -- indicate use of a reversed fmt specification (%d followed by a %s)
Extra keyword arguments will get passed through to array_to_rgba. See its
doc string for details.
"""
assert M.ndim == 3, 'requires rank-3 array of intensity values'
d = os.path.realpath(str(savedir))
if not os.path.exists(d):
os.makedirs(d)
stem = os.path.join(d, stem)
N = M.shape[0]
first, middle, last = "", "", ""
for i,m in enumerate(M):
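        # NOTE: unique_path is assumed to be provided by the author's path utilities
        # elsewhere in this toolbox; it is neither defined nor imported in this module.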
image_fn = unique_path(stem, fmt=fmt, ext="png", reverse_fmt=rev)
if i == 0:
first = image_fn
elif i == N-1:
last = image_fn
array_to_image(m, image_fn, **kwargs)
if N == 2:
middle += '\n'
elif N > 2:
middle += '\n\t...\n'
    print(first, middle, last)
return
def array_to_rgba(mat, cmap=None, norm=True, cmin=0, cmax=1):
"""Intensity matrix (float64) -> RGBA colormapped matrix (uint8)
Keyword arguments:
cmap -- a matplotlib.cm colormap object
norm -- whether the color range is normalized to values in M
If *norm* is set to False:
cmin -- minimum clipping bound of the color range (default 0)
cmax -- maximum clipping bound of the color range (default 1)
"""
if cmap is None:
from matplotlib import cm
cmap = cm.hot
M = mat.copy()
data_min, data_max = M.min(), M.max()
if norm:
cmin, cmax = data_min, data_max
else:
if cmin > data_min:
M[M < cmin] = cmin # clip lower bound
if cmax < data_max:
            M[M > cmax] = cmax # clip upper bound
return cmap((M-cmin)/float(cmax-cmin), bytes=True)
def array_to_image(M, filename, **kwargs):
"""Save matrix, autoscaled, to image file (use PIL fmts)
Keyword arguments are passed to array_to_rgba.
"""
import sys
if M.ndim != 2:
        raise ValueError('requires rank-2 matrix')
if sys.platform == "win32":
import Image
else:
from PIL import Image
img = Image.fromarray(array_to_rgba(M, **kwargs), 'RGBA')
img.save(filename)
return
def tiling_dims(N):
"""Square-ish (rows, columns) for tiling N things
"""
d = np.ceil(np.sqrt(N))
return int(np.ceil(N / d)), int(d)
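# Example (illustrative): tiling_dims(10) returns (3, 4), i.e. a 3-row by 4-column grid
# is the most square-ish arrangement that can hold 10 tiles.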
# Describes the functionality that a player can have in game
class Ability():
LEFT, RIGHT, JUMP, DOUBLE_JUMP, DROP = range(5) #how to make terrible enums
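# A sketch of the same idea using the standard library's enum module (Python 3.4+),
# given here only as an illustration:
#
#   from enum import IntEnum
#
#   class Ability(IntEnum):
#       LEFT = 0
#       RIGHT = 1
#       JUMP = 2
#       DOUBLE_JUMP = 3
#       DROP = 4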
from rssant_api.models.story_storage.common.story_key import StoryId, hash_feed_id
def test_hash_feed_id():
for i in [0, 1, 2, 7, 1024, 2**31, 2**32 - 1]:
val = hash_feed_id(i)
assert val >= 0 and val < 2**32
def test_story_id():
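    # From the cases below, StoryId.encode appears to pack feed_id into the high bits
    # and shift offset left by 4 bits: story_id = (feed_id << 32) | (offset << 4).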
cases = [
(123, 10, 0x7b000000a0),
(123, 1023, 0x7b00003ff0),
]
for feed_id, offset, story_id in cases:
assert StoryId.encode(feed_id, offset) == story_id
assert StoryId.decode(story_id) == (feed_id, offset)
class Solution:
def shortestCommonSupersequence(self, A: str, B: str) -> str:
m, n = len(A), len(B)
dp = [[""] * (n + 1) for _ in range(m + 1)]
for i in range(1, m + 1):
for j in range(1, n + 1):
if A[i - 1] == B[j - 1]:
dp[i][j] = dp[i - 1][j - 1] + A[i - 1]
else:
dp[i][j] = max(dp[i - 1][j], dp[i][j - 1], key=len)
lcs = dp[-1][-1]
i, j = 0, 0
res = []
for ch in lcs:
while i < m and A[i] != ch:
res.append(A[i])
i += 1
while j < n and B[j] != ch:
res.append(B[j])
j += 1
i += 1
j += 1
res.append(ch)
return ''.join(res) + A[i:] + B[j:]
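# Illustrative usage:
#   Solution().shortestCommonSupersequence("abac", "cab")  # -> "cabac"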
# Albert-91/precon - src/precon/exploring.py
import logging
from dataclasses import dataclass
from functools import reduce
from typing import List, Tuple
import math
from precon.devices_handlers.distance_sensor import get_distance
from precon.devices_handlers.driving_engines import turn_right_on_angle, drive_forward_on_units, drive_backward_on_units
logger = logging.getLogger(__name__)
DEFAULT_NUMBER_OF_DIRECTIONS_TO_CHECK = 10
@dataclass(frozen=True)
class Location:
x: float
y: float
@dataclass(frozen=True)
class DirectionInfo:
location: Location
angle: int
distance: int
@dataclass(frozen=True)
class ObstacleLocation:
x: int
y: int
@dataclass(frozen=True)
class UndiscoveredRegion:
x: float
y: float
class Localizer:
def __init__(self, x: float = 0.0, y: float = 0.0, angle: int = 0) -> None:
self._x = x
self._y = y
self._angle = angle
self._locations: List[Tuple[float, float]] = [self.current_location]
@property
def current_location(self) -> Tuple[float, float]:
return self._x, self._y
@property
def current_angle(self) -> int:
return self._angle
@property
def all_locations(self) -> List[Tuple[float, float]]:
return self._locations
def update(self, movement: int, angle: int = 0) -> None:
self._y = round(self._y + movement * math.cos(math.radians(angle)), 3)
self._x = round(self._x + movement * math.sin(math.radians(angle)), 3)
self._angle += angle
logger.debug(f"Set x = {self._x}, y = {self._y}, angle={self._angle}")
self._locations.append(self.current_location)
class Mapper:
"""Mapper keeps all mapped locations."""
MAXIMUM_DISTANCE_TO_SET_OBSTACLE = 2000
def __init__(self) -> None:
self._obstacles: List[ObstacleLocation] = []
@property
def obstacles(self) -> List[ObstacleLocation]:
return self._obstacles
async def map_obstacles(self, directions: List[DirectionInfo]) -> None:
directions_less_than_max = filter(lambda x: x.distance <= self.MAXIMUM_DISTANCE_TO_SET_OBSTACLE, directions)
for direction in directions_less_than_max:
location = self._compute_obstacle_coordinates(direction)
self._obstacles.append(location)
logger.debug(f"Added new obstacle's location: {location}")
@staticmethod
def _compute_obstacle_coordinates(direction: DirectionInfo) -> ObstacleLocation:
def round_half_up(n: float, decimals: int = 0) -> float:
multiplier: int = 10 ** decimals
return math.floor(n * multiplier + 0.5) / multiplier
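        # The scan angle is measured from the +y axis, so the obstacle lies at the
        # robot's location plus distance * (sin(angle), cos(angle)).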
y = direction.location.y + direction.distance * math.cos(math.radians(direction.angle))
x = direction.location.x + direction.distance * math.sin(math.radians(direction.angle))
return ObstacleLocation(int(round_half_up(x)), int(round_half_up(y)))
class PathPlanner:
def __init__(self, mapper: Mapper) -> None:
self._mapper: Mapper = mapper
self._undiscovered_regions: List[UndiscoveredRegion] = []
@property
def undiscovered_regions(self) -> List[UndiscoveredRegion]:
return self._undiscovered_regions
async def compute_undiscovered_location(self) -> None:
def count_neighbours_in_radius(point: ObstacleLocation, nodes: List[ObstacleLocation], radius: int = 2) -> int:
result = []
for node in nodes:
if (point.x - node.x) ** 2 + (point.y - node.y) ** 2 <= radius and not point == node:
result.append(node)
return len(result)
for location in self._mapper.obstacles:
neighbours_number = count_neighbours_in_radius(location, self._mapper.obstacles)
if neighbours_number < 2:
self._undiscovered_regions.append(UndiscoveredRegion(location.x, location.y))
class Explorer:
"""Explorer knows current location and takes information which areas has to be discovered.
He decides which area will be discovered as first."""
MAXIMUM_NUMBER_OF_DIRECTIONS: int = 20
def __init__(self, localizer: Localizer) -> None:
self._localizer: Localizer = localizer
self._mapper: Mapper = Mapper()
async def run(self) -> None:
await self.scan_area()
while not self._mapper.obstacles:
self.move_forward(unit=100)
await self.scan_area()
async def scan_area(self, directions_number: int = DEFAULT_NUMBER_OF_DIRECTIONS_TO_CHECK) -> List[DirectionInfo]:
self._validate_directions_number(directions_number)
angle_per_rotation = int(360 / directions_number)
angle = self._localizer.current_angle
directions = [
DirectionInfo(
location=Location(*self._localizer.current_location), angle=angle, distance=await get_distance()
)
]
for _ in range(directions_number):
angle += angle_per_rotation
directions.append(
DirectionInfo(
location=Location(*self._localizer.current_location), angle=angle, distance=await get_distance()
)
)
turn_right_on_angle(angle_per_rotation)
turn_right_on_angle(360 - angle)
await self._mapper.map_obstacles(directions)
return directions
def move_forward(self, unit: int = 1) -> None:
drive_forward_on_units(unit=unit)
self._localizer.update(unit)
def move_backward(self, unit: int = 1) -> None:
drive_backward_on_units(unit=unit)
self._localizer.update(-unit)
async def get_direction_to_move(self) -> DirectionInfo:
directions = await self.scan_area()
if not directions:
raise NoDirectionFound
return reduce(lambda a, b: a if a.distance > b.distance else b, directions)
def _validate_directions_number(self, directions_number: int) -> None:
if (
not isinstance(directions_number, int)
or directions_number <= 1
or directions_number > self.MAXIMUM_NUMBER_OF_DIRECTIONS
):
raise ValueError
class NoDirectionFound(Exception):
pass
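# Illustrative usage (hypothetical; assumes the device handlers are wired to real hardware):
#   import asyncio
#   explorer = Explorer(Localizer())
#   asyncio.run(explorer.run())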
"""
monobit.image - fonts stored in image files
(c) 2019--2021 <NAME>
licence: https://opensource.org/licenses/MIT
"""
import logging
from collections import Counter
from pathlib import Path
try:
from PIL import Image
except ImportError:
Image = None
from ..scripting import pair, rgb
from ..binary import ceildiv
from ..storage import loaders, savers
from ..streams import FileFormatError
from ..font import Font
from ..glyph import Glyph
from ..renderer import chart_image
DEFAULT_IMAGE_FORMAT = 'png'
# available background policies
# -----------------------------
#
# most-common use colour most commonly found in payload cells
# least-common use colour least commonly found in payload cells
# brightest use brightest colour, by sum of RGB values
# darkest use darkest colour, by sum of RGB values
# top-left use colour of top-left pixel in first cell
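# For example, a direct call might look like this (hypothetical invocation -- in normal use the
# loader is dispatched through monobit's loader registry rather than called by hand):
#
#     font = load_image('charset.png', cell=(8, 16), background='top-left')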
if Image:
@loaders.register(
'png', 'bmp', 'gif', 'tif', 'tiff',
'ppm', 'pgm', 'pbm', 'pnm', 'webp',
'pcx', 'tga', 'jpg', 'jpeg',
magic=(
# PNG
b'\x89PNG\r\n\x1a\n',
# BMP
#b'BM', # -- clash with bmfont b'BMF'
# GIF
b'GIF87a', b'GIF89a',
# TIFF
            b'\x4D\x4D\x00\x2A', b'\x49\x49\x2A\x00',
# PNM
b'P1', b'P2', b'P3',
# WebP
b'RIFF',
# PCX
b'\n\x00', b'\n\x02', b'\n\x03', b'\n\x04', b'\n\x05',
# JPEG
b'\xFF\xD8\xFF',
),
name='Bitmap Image',
)
def load_image(
infile, where=None,
cell:pair=(8, 8),
margin:pair=(0, 0),
padding:pair=(0, 0),
scale:pair=(1, 1),
# 0 or negative indicates 'use all chars'
numchars:int=0,
background:str='most-common'
):
"""
Extract character-cell font from image.
cell: size X,Y of character cell
margin: number of pixels in X,Y direction around glyph chart
        padding: number of pixels in X,Y direction between glyphs
scale: number of pixels in X,Y direction per glyph bit
numchars: number of glyphs to extract
background: determine background from "most-common", "least-common", "brightest", "darkest", "top-left" colour
"""
width, height = cell
scale_x, scale_y = scale
padding_x, padding_y = padding
margin_x, margin_y = margin
# work out image geometry
step_x = width * scale_x + padding_x
        step_y = height * scale_y + padding_y
# maximum number of cells that fits
img = Image.open(infile)
ncells_x = (img.width - margin_x) // step_x
ncells_y = (img.height - margin_y) // step_y
# extract sub-images
# assume row-major left-to-right top-to-bottom
crops = [
img.crop((
margin_x + _col*step_x,
margin_y + _row*step_y,
margin_x + _col*step_x + width * scale_x,
margin_y + _row*step_y + height * scale_y,
))
for _row in range(ncells_y)
for _col in range(ncells_x)
]
if not crops:
logging.error('Image too small; no characters found.')
return Font()
# scale
crops = [_crop.resize(cell) for _crop in crops]
# get pixels
crops = [list(_crop.getdata()) for _crop in crops]
# restrict to requested number of characters
if numchars and numchars > 0:
crops = crops[:numchars]
# check that cells are monochrome
colourset = set.union(*(set(_data) for _data in crops))
if len(colourset) > 2:
logging.warning('Colour, greyscale and antialiased glyphs are not supported. ')
logging.warning(
f'More than two colours ({len(colourset)}) found in payload. '
'All non-background colours will be converted to foreground.'
)
colourfreq = Counter(_c for _data in crops for _c in _data)
brightness = sorted((sum(_v for _v in _c), _c) for _c in colourset)
if background == 'most-common':
# most common colour in image assumed to be background colour
bg, _ = colourfreq.most_common(1)[0]
elif background == 'least-common':
# least common colour in image assumed to be background colour
bg, _ = colourfreq.most_common()[-1]
elif background == 'brightest':
# brightest colour assumed to be background
_, bg = brightness[-1]
elif background == 'darkest':
# darkest colour assumed to be background
_, bg = brightness[0]
        elif background == 'top-left':
            # top-left pixel of first char assumed to be background colour
            bg = crops[0][0]
        else:
            # an unrecognised policy would otherwise leave bg undefined further down
            raise ValueError(f'Unknown background policy: {background!r}')
# replace colours with characters
crops = tuple(
[_c != bg for _c in _cell]
for _cell in crops
)
# reshape cells
glyphs = [
Glyph(tuple(
_cell[_offs: _offs+width]
for _offs in range(0, len(_cell), width)
))
for _cell in crops
]
# set code points
return Font(glyphs)
@savers.register(linked=load_image)
def save_image(
fonts, outfile, where=None,
format:str='',
columns:int=32,
margin:pair=(0, 0),
padding:pair=(0, 0),
scale:pair=(1, 1),
border:rgb=(32, 32, 32), paper:rgb=(0, 0, 0), ink:rgb=(255, 255, 255),
):
"""
Export character-cell font to image.
format: image file format
columns: number of columns in glyph chart
margin: number of pixels in X,Y direction around glyph chart
        padding: number of pixels in X,Y direction between glyphs
scale: number of pixels in X,Y direction per glyph bit
border: border colour R,G,B 0--255
paper: background colour R,G,B 0--255
ink: foreground colour R,G,B 0--255
"""
if len(fonts) > 1:
raise FileFormatError('Can only save one font to image file.')
img = chart_image(fonts[0], columns, margin, padding, scale, border, paper, ink)
try:
img.save(outfile, format=format or Path(outfile).suffix[1:])
except (KeyError, ValueError, TypeError):
img.save(outfile, format=DEFAULT_IMAGE_FORMAT)
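# Hypothetical round-trip sketch (assumes Pillow is installed; in practice these functions are
# usually reached through monobit's loader/saver registry rather than called directly):
#
#     font = load_image('chart-8x8.png', cell=(8, 8))
#     save_image([font], 'chart-out.png', columns=16, scale=(2, 2))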