prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
# coding=utf-8
# C | opyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# Yo | u may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the Licens... |
##
## Biskit, a toolkit for the manipulation of macromolecular structures
## Copyright (C) 2004-2018 Raik Gruenberg & Johan Leckner
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either v... | ' % (msg, os.uname()[1], id)
s += '\Error:' + T.lastError()
s += '\nErrorTrace:\n' + T.lastErrorTrace() + '\n'
s += '\n'
self. | errorLog.add( s )
except Exception, why:
f = open('ErrorReportError_AmberEntropySlave','a')
f.write( str(type(why)) )
try:
f.write( T.lastErrorTrace() )
except:
pass
f.close()
def go(self, jobs):
"""
... |
ike of predictors, shape = [n_samples, p]
Training vectors, where n_samples in the number of samples and
p is the number of predictors.
Y : array-like of response, shape = [n_samples, q], optional
Training vectors, where n_samples in the number of samples and
q i... | _` : array, shape = [n_samples, n_components]
X scores.
`y_scores_` : array, shape = [n_samples, n_components]
Y scores.
`x_rotations_` : array, shape = [p, n_components]
X block to latents rotations.
`y_rotations_` : array, shape = [q, n_components]
Y block to latents rot... | Xk u) var(Yk u), such that ``|u| = |v| = 1``
Note that it maximizes both the correlations between the scores and the
intra-block variances.
The residual matrix of X (Xk+1) block is obtained by the deflation on the
current X score: x_score.
The residual matrix of Y (Yk+1) block is obtained by defl... |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrati | ons, models
class Migration(migrations.Migration):
dependencies = [
('orders', '0001_initial'),
]
operations = [
| migrations.AlterField(
model_name='order',
name='paid',
field=models.BooleanField(default=False),
),
]
|
# Django settings for trywsk project.
DEBUG = False
TEMPLATE_DEBUG = DEBUG
import os
from unipath import Path
PROJECT_ROOT = Path(__file__).ancestor(2)
PROJECT_ROOT = os.path.join(PROJECT_ROOT,'whisk_tutorial')
ADMINS = (
('IBM jStart', 'jstart@us.ibm.com'),
)
MANAGERS = ADMINS
DATABASES = {}
TEST_RUNNER = ... | x',
#'django_extensions',
#'south'
)
PROJECT_APPS = (
# 'base',
)
INSTALLED_APPS = PREREQ_APPS + PROJECT_APPS
# A sample l | ogging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
... |
import os
impor | t sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../tools'))
import fasta
import genetics
import table
def main(argv):
codon = table.codon(argv[0])
strings = fasta.read_ordered(argv[1])
dna = strings[0]
introns = strings[1:]
for intron in introns:
dna = dna.replace(i... | _protein(genetics.dna_to_rna(dna), codon)
if __name__ == "__main__":
main(sys.argv[1:])
|
"""Base settings shared by all environments.
This is a reusable basic settings file.
"""
from django.conf.global_settings import *
import os
import sys
import re
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
TIME_ZONE = 'GB'
USE_TZ = True
US... | o.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.co | ntrib.admin',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',... |
# test re.sub with unmatched groups, behaviour changed in CPython 3.5
try:
import ure as re
except ImportError:
try:
| import re
except ImportError:
print("SKIP")
raise SystemExit
try:
re.sub
except AttributeError:
print("SKIP")
raise SystemExit
# first | group matches, second optional group doesn't so is replaced with a blank
print(re.sub(r"(a)(b)?", r"\2-\1", "1a2"))
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
'''Pychemqt, Chemical Engineering Process simulator
Copyright (C) 2009-2017, Juan José Gómez Romera <jjgomera@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Softwar... | (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PU | RPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.'''
###############################################################################
# Tools to create a python shell wit... |
'Meta': {'object_name': 'Group'},
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'default': "u''", 'max_lengt... | 'group_by_group': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'people': ('aldryn_common.admin_fields.sortedm2m.SortedM2MModelField', [], {'symmetrical': 'False', 'to': u"orm['aldryn_people.Person']", 'null': 'True', 'blank': 'True'}),
'show_links': ('django.db.model... | ,
'show_vcard': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'style': ('django.db.models.fields.CharField', [], {'default': "u'standard'", 'max_length': '50'})
},
u'aldryn_people.person': {
'Meta': {'object_name': 'Person'},
'email':... |
"""
telemetry full tests.
"""
import platform
import sys
from unittest import mock
import pytest
import wandb
def test_telemetry_finish(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
run = wandb.init()
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx()... | th mock.patch.dict("sys.modules", {"catboost": mock.Mock()}):
import catboost
run = wandb.init()
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
| # catboost in both init and finish modules
assert telemetry and 7 in telemetry.get("1", [])
assert telemetry and 7 in telemetry.get("2", [])
@pytest.mark.skipif(
platform.system() == "Windows", reason="test suite does not build jaxlib on windows"
)
@pytest.mark.skipif(sys.versi... |
'''
Crunchyroll urlresolver plugin
Copyright (C) 2013 voinage
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is ... | a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces imp | ort PluginSettings
from urlresolver.plugnplay import Plugin
import re
import urllib2
from urlresolver import common
import os
class CrunchyRollResolver(Plugin, UrlResolver, PluginSettings):
implements = [UrlResolver, PluginSettings]
name = "crunchyroll"
domains = [ "crunchyroll.com" ]
def __init__(se... |
from django.apps import | apps
from contextlib import contextmanager
def session():
return apps.get_app_config('basex').basex
@contextmanager
def recipe_db():
s = session()
| s.execute('open recipe')
yield s
s.close()
|
#! /usr/bin/env python
"""
Sample script that illustrates exclusive card connection decorators.
__author__ = "http://www.gemalto.com"
Copyright 2001-2010 gemalto
Author: Jean-Daniel Aussel, mailto:jean-daniel.aussel@gemalto.com
This file is part of pyscard.
pyscard is free software; you can redistribute it and/or m... | option) any later version.
pyscard is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser Gener... | long with pyscard; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
from smartcard.CardType import AnyCardType
from smartcard.CardRequest import CardRequest
from smartcard.CardConnectionObserver import ConsoleCardConnectionObserver
from smartcard.CardConn... |
import os
from nose.tools import (assert_equal,
assert_ | true)
from ckantoolkit import config
import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
import ckanapi
import boto
from moto import mock_s3
import logging
log = logging.getLogger(__name__)
class TestS3ControllerResourceDownload(helpers.FunctionalTestBase):
| def _upload_resource(self):
factories.Sysadmin(apikey="my-test-key")
app = self._get_test_app()
demo = ckanapi.TestAppCKAN(app, apikey='my-test-key')
factories.Dataset(name="my-dataset")
file_path = os.path.join(os.path.dirname(__file__), 'data.csv')
resource = demo.a... |
de_formatter.extra import LinebreakingAttributeFormatter
>>> formatters = dict(base.formatters,
... **{Call: LinebreakingAttributeFormatter.call_formatter_factory(base.formatters[ast.Call]),
... Attribute: LinebreakingAttributeFormatter,
... ... | ent=None, value_formatter=None):
# if value_formatt | er is not provided check wether we are not part of attribute ref
if value_formatter is None and isinstance(expr.value, ast.Attribute):
return LinebreakingAttributeFormatter(expr, formatters_register, parent)
return super(RedirectingSubsriptionFormatter, cls).__new__(c... |
timedelta(minutes=30 * i)
max_events = max(max_events, len(schedule[half_hour][id]))
max_simul[id] = max_events
for half_hour in schedule:
for location in schedule[half_hour]:
for event in schedule[half_hour][location]:
if isinstance(... | ion.add(add_panel)
if event.is_new:
event.name = add_panel.name
event.description = add_panel.description
for pa in add_panel.applicants:
if pa.attendee_id:
assigned_panelist = AssignedPanelis... | session.add(assigned_panelist)
|
rt boto3
import boto
from boto.exception import EC2ResponseError
import sure # noqa
from moto import mock_ec2
SAMPLE_DOMAIN_NAME = u'example.com'
SAMPLE_NAME_SERVERS = [u'10.0.0.6', u'10.0.0.7']
@mock_ec2
def test_vpcs():
conn = boto.connect_vpc('the_key', 'the_secret')
vpc = conn.create_vpc("10.0.0.0/16")... | options.id, vpc1.id)
conn.associate_dhcp_options(dhcp_options.id, vpc2.id)
vpcs = conn.get_all_vpcs(filters={'dhcp-options-id': dhcp_options.id})
vpcs.should.have.length_of(2)
vpc_ids = tuple(map(lambda v: v.id, vpcs))
vpc1.id.should.be.within(vpc_ids)
vpc2.id.should.be.within(vpc_ids)
|
@mock_ec2
def test_vpc_get_by_tag():
conn = boto.connect_vpc()
vpc1 = conn.create_vpc("10.0.0.0/16")
vpc2 = conn.create_vpc("10.0.0.0/16")
vpc3 = conn.create_vpc("10.0.0.0/24")
vpc1.add_tag('Name', 'TestVPC')
vpc2.add_tag('Name', 'TestVPC')
vpc3.add_tag('Name', 'TestVPC2')
vpcs = conn... |
import datetime
import logging
import textwrap
import time
import click
import hatarake
import hatarake.net as requests
from hatarake.config import Config
logger = logging.getLogger(__name__)
@click.group()
@click.option('-v', '--verbosity', count=True)
def main(verbosity):
logging.basicConfig(level=logging.WA... | from the final title to make things neater
# but in the future, may want to leave the hash tag in the full title
tags = {tag.strip("#") for tag in title.split() if tag.startswith("#")}
title = ' '.join({tag for tag in title.split() if not tag | .startswith('#')})
response = requests.post(
api + '/append',
headers={
'Authorization': 'Token %s' % token,
},
data={
'start': start.isoformat(),
'end': end.isoformat(),
'category': tags,
'title': title,
}
)
... |
# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# 3p
import requests
# project
from checks import AgentCheck
from util import headers
class PHPFPMCheck(AgentCheck):
"""
Tracks basic php-fpm metrics via the status module
Requires php-fpm pools to ... | l, e))
self.service_check(self.SERVICE_CHECK_NAME,
AgentCheck.CRITICAL, tags=sc_tags, message=str(e))
else:
self.service_check(self.SERV | ICE_CHECK_NAME, AgentCheck.OK, tags=sc_tags)
|
miss_penalty)
def false_detection(self):
self.marker(self.markerbase+4)
self.rewardlogic.score_event(self.false_penalty)
def correct(self):
if self.focused:
if ((self.cueobj is not None) and self.cueobj.iscued):
self.marker(self.markerbase+5 ... | handleduration=self.timeout,
| defaulthandler=self.false_detection)
while True:
# off status
if self.tick_rate is not None:
t_end = time.time()+self.event_interval()
while time.time() < t_end:
self.marker(self.markerbase+10)
... |
import sqlite3
import time
conn = sqlite3.connect('log.db')
c = conn.cursor()
# Create table
c.execute("CREATE T | ABLE if not exists log (log_timestamp DECIMAL(12,8), "
"log_source text, msg_sequence integer, log_message text, statu | s text)")
for x in range(0, 1000):
insertquery = "INSERT INTO log (log_timestamp, log_source, msg_sequence, log_message) " \
"VALUES ({0},'tst source', {1}, 'log message')".format(time.time(), x)
c.execute(insertquery)
conn.commit()
conn.close() |
# -*- coding: utf-8 -*-
import functools
import httplib as http
import logging
import time
import bleach
from django.db.models import Q
from flask import request
from framework.auth.decorators import collect_auth
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from f... | matching_title, 'is_deleted', is_deleted, True)
matching_title = conditionally_add_query_item(matching_title, 'type', is_registration, 'osf.registration')
matching_title = conditionally_add_query_item(matching_title, 'type', is_collection, 'osf.collection')
if len(ignore_nodes) > 0:
for node_id in ... | my_projects = AbstractNode.objects.filter(
matching_title &
Q(_contributors=user) # user is a contributor
)[:max_results]
my_project_count = my_project_count
if my_project_count < max_results and include_public == 'yes':
public_projects = AbstractNode.object... |
from __future__ import print_function
import unittest
import RMF
class Tests(unittest.TestCase):
def test_multiparent(self):
"""Test that nodes with multiple parents can be used and resolve"""
for suffix in RMF.suffixes:
path = RMF._get_temporary_file_path("alias2." + suffix)
... | nh = rh.add_child("hi", RMF.REPRESENTATION)
nh.add_child(rh)
ch = nh.get_children()
self.assertEqual(len(ch), 1)
print(ch)
self.assertEqual(ch[0], rh)
def test_aliases(self):
"""Test that aliases can be used and resolve"""
for suffix... | get_temporary_file_path("alias." + suffix)
print(path)
fh = RMF.create_rmf_file(path)
print("create factory")
af = RMF.AliasFactory(fh)
rh = fh.get_root_node()
nh = rh.add_child("hi", RMF.REPRESENTATION)
af.get(nh.add_child("alias", RMF... |
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
"""
clang/llvm detection.
"""
import os, sys
from waflib import Configure, Options, Utils
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@conf
def find_clang(conf):
"""
... | .exe'
v['cshlib_PATTERN'] = '%s.dll'
v['implib_PATTERN'] = 'lib%s.dll.a'
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
v['CFLAGS_cshlib'] = []
v.append_value('CFLAGS_cshlib', ['-DDLL_EXPORT']) # TODO adding non | standard defines like this DLL_EXPORT is not a good idea
# Auto-import is enabled by default even without this option,
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
# that the linker emits otherwise.
v.append_value('LINKFLAGS', ['-Wl,--enable-au... |
aramstring)
elif first_check_source == "vanished" and check_source == "new":
cluster_items[(check_type, item)] = ("old", first_paramstring)
elif check_source == "vanished" and first_check_source == "new":
cluster_items[(check_type, ... | Reading, parsing, writing, modifying autochecks files |
# '----------------------------------------------------------------------'
# Read automatically discovered checks of one host.
# world: "config" -> File in var/check_mk/autochecks
# " | active" -> Copy in var/check_mk/core/autochecks
# Returns a table with three columns:
# 1. check_type
# 2. item
# 3. parameters evaluated!
def read_autochecks_of(hostname, world="config"):
if world == "config":
basedir = autochecksdir
else:
basedir = var_dir + "/core/autochecks"
filepath = b... |
str("secure_sysvars")
class InstSecureSysvars(Instruction):
def execute(self, fr):
fr.globalvar_set(0, fr.user)
fr.globalvar_set(1, si.DBRef(db.getobj(fr.user).location))
fr.globalvar_set(2, fr.trigger)
fr.globalvar_set(3, fr.command)
@instr("!")
class InstBang(Instruction):
de... | Instruction):
def execute(self, fr):
num = fr.data_pop(int)
fr.check_underflow(num)
if not num:
return
if num < 0:
raise MufRuntimeError("Expected positive integer.")
else:
a = fr.data_pick(num)
fr.data_push(a)
@instr("over")
... | a_push(a)
@instr("put")
class InstPut(Instruction):
def execute(self, fr):
fr.check_underflow(2)
num = fr.data_pop(int)
val = fr.data_pop()
fr.check_underflow(num)
if not num:
return
if num < 0:
raise MufRuntimeError("Value out of range")
... |
g3.status_at_node[mg3.nodes_at_left_edge] = FIXED_VALUE_BOUNDARY
>>> mg3.status_at_node[mg3.nodes_at_top_edge] = CLOSED_BOUNDARY
>>> mg3.status_at_node[mg3.nodes_at_bottom_edge] = CLOSED_BOUNDARY
>>> mg3.status_at_node[mg3.nodes_at_right_edge] = CLOSED_BOUNDARY
>>> fr3 = FlowRouter(mg3)
>>> K_field ... | a float
self._r_i = float(rainfall_intensity)
elif len(rainfall_intensity) == self.grid.number_of_nodes:
raise ValueError('This component can no longer handle ' +
'spatially variable rainfall. Use ' +
'StreamPo | werEroder.')
self._r_i = numpy.array(rainfall_intensity)
else:
raise TypeError('Supplied type of rainfall_' +
'intensity was not recognised!')
# We now forbid changing of the field name
if 'value_field' in kwds.keys():
raise ValueE... |
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
from models import Project
class ProjectsTest(TestCase):
fixtures = ['test_data.json']
def test_project_listing(self):
"""
Verify that the project listing page contains a... | in Project.objects.published():
self | .assertTrue(project in response.context['project_list'])
def test_verify_author_detail_pages(self):
"""
Verify that each author has a detail page and that the author is
contained within the page's context.
"""
for project in Project.objects.all():
re... |
import paho.mqtt.client as mqtt
import json, time
import RPi.GPIO as GPIO
from time import sleep
# The script as below using BCM GPIO 00..nn numbers
GPIO.setmode(GPIO.BCM)
# Set relay pins as output
GPIO.setup(24, GPIO.OUT)
# ----- CHANGE THESE FOR YOUR SETUP -----
MQTT_HOST = "190.97.168.236"
MQTT_PORT = 1883
US... | ORD = ""
# ----- | ----------------------------------
def on_connect(client, userdata, rc):
print("\nConnected with result code " + str(rc) + "\n")
#Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("/iot/control/")
print("Subscrib... |
#
# Copyright (c) 2021 Arm Limited and Contributors. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""Power cycle devices using the 'Mbed TAS RM REST API'."""
import os
import json
import time
import requests
from .host_test_plugins import HostTestPluginBase
class HostTestPluginPowerCycleResetMethod(H... | get_id,
"switch_command": "ON",
}
],
}
result = False
# reset target
switch_off_req = self.__run_request(ip, port, switch_off_req)
if switch_off_req is None:
self.print_plugin_error("HOST: Failed to communicate with TA... | % switch_off_req["sub_requests"][0]["error"]
)
return result
def poll_state(required_state):
switch_state_req = {
"name": "switchResource",
"sub_requests": [
{
"resource_type": "mbed_pla... |
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from te... | self.send_message(msg_feefilter(feerate))
self.sync_with_ping()
class FeeFilterTest(BitcoinTestFramew | ork):
def __init__(self):
super().__init__()
self.num_nodes = 2
self.setup_clean_chain = False
def setup_network(self):
# Node1 will be used to generate txs which should be relayed from Node0
# to our test node
self.nodes = []
self.nodes.append(start_nod... |
import subprocess
"""
ideas from https://gist.github.com/godber/7692812
"""
class PdfInfo:
def __init__(self, filepath):
self.filepath = filepath
self.info = {}
self.cmd = "pdfinfo"
self.process()
def process(self):
labels = ['Title', 'Author', 'Creator', 'Pro | ducer', 'CreationDate', \
'ModDate', 'Tagged', 'Pages', 'Encrypted', 'Page size', \
'File size', 'Optimized', 'PDF version']
cmdOutput = subprocess.check_output([self.cmd, | self.filepath])
for line in cmdOutput.splitlines():
for label in labels:
if label in line:
self.info[label] = self.extract(line)
def isEncrypted(self):
return False if (self.info['Encrypted'][:2]=="no") else True
def extract(self, row):
... |
#
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or... | ons.output))
# We can't use options.input here, because 'input' is a python keyword.
with open(getattr(options, 'input'), 'r') as input_:
text = input_.read()
for from_pattern, to_text in replacement_mapping.items():
# Treat f | rom_pattern as a regex, with re.DOTALL (meaning dot captures
# newlines). To prevent . from being greedy, use a "?". E.g.:
#
# 'remove: {.*?}' will correctly handle:
#
# 'remove: { things we want removed } { things we want to keep }'
#
# because the . stops at the first '}'. See:
# http... |
/bin/python
# -*- coding:utf8 -*-
import os
import tensorflow as tf
from keras import layers
from keras.applications.imagenet_utils import _obtain_input_shape
from keras.backend.tensorflow_backend import set_session
from keras.engine.topology import get_source_inputs
from keras.layers import *
from keras.models import... | content_input = input_content_tensor
# Determine proper style input shape
input_style_shape = _obtain_input_shape(input_style_shape,
default_size=256,
min_size=48,
data_format=... | include_top=False)
if input_style_tensor is None:
style_input = Input(shape=input_style_shape)
else:
if not K.is_keras_tensor(input_style_tensor):
style_input = Input(tensor=input_style_tensor, shape=input_style_shape)
else:
style_input = input_style_tensor
... |
,
'>=': 'gte',
'<': 'lt',
'<=': 'lte'
}
class SearchException(Exception):
"""
Exception class for unparseable search queries
"""
def __init__(self, message):
self.message = message
def contact_search(org, query, base_queryset):
"""
Searches for contacts
:param org: th... | _simple(org, query, base_queryset) | , False
def contact_search_simple(org, query, base_queryset):
"""
Performs a simple term based search, e.g. 'Bob' or '250783835665'
"""
matches = ('name__icontains',) if org.is_anon else ('name__icontains', 'urns__path__icontains')
terms = query.split()
q = Q(pk__gt=0)
for term in terms:
... |
# -*- coding: cp1252 -*-
import urllib,urllib2,re,cookielib,string,os,sys
import xbmc, xbmcgui, xbmcaddon, xbmcplugin
from resources.libs import main
#Mash Up - by Mash2k3 2012.
from t0mm0.common.addon import Addon
from resources.universal import playbackengine, watchhistory
addon_id = 'plugin.video.movie25'
selfAddo... | ')
def DIFFFB():
main.addDir('Level 1','http://www.fitnessblender.com/v/full-length-workouts/?all=1p=1&str=&time_min=&time_max=&cal_min=&cal_max=&difficulty[]=1&type[]=&equipment[]=&body_focus[]=',202,art+'/fitnessblender.png')
main.addDir('Level 2','http://www.fitnessblender.com/v/full-length-workouts/?all=... | []=&equipment[]=&body_focus[]=',202,art+'/fitnessblender.png')
main.addDir('Level 3','http://www.fitnessblender.com/v/full-length-workouts/?all=1p=1&str=&time_min=&time_max=&cal_min=&cal_max=&difficulty[]=3&type[]=&equipment[]=&body_focus[]=',202,art+'/fitnessblender.png')
main.addDir('Level 4','http://www.fitn... |
):
return self.model.objects.filter(element_type = 'number')
admin.site.register(NumberElementModel, NumberElementAdmin)
class RangeElementAdmin(GeoformElementAdmin):
form = RangeElementForm
fieldsets = (
(None, {
'fields': ('question',
'min_label',
... | soup = BeautifulSoup(html)
question_data['question'].append(soup.p.text)
if soup.find(attrs={'data-random': 'true'}):
question_data['randomize'] = True
labels = soup.f | ind_all('label')
for j, label in enumerate(labels):
if i == 0:
initial_data.append({u'label': [label.text.strip()]})
else:
initial_data[j]['label'].append(label.text.strip())
return super(RadioElementAdm... |
# Lint as: python3
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this | file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY ... | express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An abstract class for the Vizier client for both CAIP and uCAIP."""
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Account Analytic Defaults',
'version': '1.0',
'category': 'Accounting',
'description': """
Set default values for your analytic accounts.
==============================================
Allows ... | * Product
* Partner
* User
* Company
* Date
""",
'website': 'https://www.odoo.com/page/accounting',
'depends': ['sale_stock'],
'data': [
'security/ir.model.access.csv',
'security/account_analytic_default_security.xml',
'account_analytic_default_view.xml'
... | [],
'installable': True,
'auto_install': False,
}
|
import collections
import itertools
import nengo
import pacman103
from .config import Config
import connection
import ensemble
import node
import probe
import utils
class Assembler(object):
"""The Assembler object takes a built collection of objects and connections
and converts them into PACMAN vertices and... | def get_outgoing_connections(self, obj):
return [c for c in self.connections if c.pre_obj == obj]
Assembler.register_connection_builder(connection.generic_connection_builder)
Assembler.register_object_builder(ensemble.EnsembleLIF.assemble,
ensemble.IntermediateEnsembleLIF)... | emble_from_intermediate,
node.IntermediateFilter)
Assembler.register_object_builder(node.FilterVertex.assemble,
node.FilterVertex)
Assembler.register_object_builder(probe.DecodedValueProbe.assemble,
probe.IntermediateP... |
#
# Copyright (C) 2010, 2011, 2014 Stanislav Bohm
#
# This file is part of Kaira.
#
# Kaira is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License, or
# (at your option) any... | line_callback
self.exit_callback = exit_callback
def on_exit(self):
self.process.w | ait()
return self.safe_call(self.exit_callback, self.process.returncode)
def on_line(self, line, stream):
return self.safe_call(self.line_callback, line, stream)
class ConnectionThread(ReadLineThread):
def __init__(self, host, port, line_callback, exit_callback, connect_callback):
Re... |
from FindPathsPlugin import FindPathsPlugin
import tulipplugins
class FindPaths0(FindPathsPlugin):
""" Tulip plugin algorithm whic | h searches for 1- | hop paths """
def __init__(self, context):
FindPathsPlugin.__init__(self, context, 0)
# The line below does the magic to register the plugin to the plugin database
# and updates the GUI to make it accessible through the menus.
tulipplugins.registerPlugin("FindPaths0", "Find Nodes (Regex)", "Nathaniel Nelso... |
n
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable ... | owners=[admin])
self.login(username="gamma")
arguments = {
"filters": [{"col": "dashboard_title", "opr": "sw", "value": title[0:8]}]
| }
uri = DASHBOARDS_API_URL_WITH_QUERY_FORMAT.format(prison.dumps(arguments))
rv = self.client.get(uri)
self.assert200(rv)
data = json.loads(rv.data.d |
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2016 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either v... | te read only connections'))
# Check operator preferences
for operator_id, operator in six.iteritems(new_wiring_status['operators']):
if operator_id in old_wiring_status['operators']:
old_operator = old_wiring_status['operators'][operator_id]
| added_preferences = set(operator['preferences'].keys()) - set(old_operator['preferences'].keys())
removed_preferences = set(old_operator['preferences'].keys()) - set(operator['preferences'].keys())
updated_preferences = set(operator['preferences'].keys()).intersection(old_operator... |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListTy... | "load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in se | lf._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = ge... |
-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
# After the check of the copy source in the case of s3acl is valid,
# Swift3 che... | ket write permissions of the destination.
self.assertEqual(len(self.swift.calls_with_headers), 3)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, | headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
@s3acl
def test_object_POST_error(self):
code = self._test_method_error('POST', '/bucket/object', None)
self.assertEq... |
# -*- coding: utf-8 -*-
"""
legi | t.helpers
~~~~~~~~~~~~~
Various Python helpers.
"""
import os
import platform
_platform = platform.system().lower()
is_osx = (_platform == 'darwin')
is_win = (_platform == 'windows')
is_lin = (_platform == 'linux')
def find_path_above(*names):
"""Attempt to locate given path by searching parent dirs."""
... | d = os.path.join(path, name)
if os.path.exists(joined):
return os.path.abspath(joined)
path = os.path.join('..', path)
|
import jso | n,os,shelve
import asyncio,sys
DATAFILENAME="data"
def set_user_id(new_id):
    """Store the current user's id in the module-level cache."""
    _local_data["user_id"] = new_id
def set_login_token(token):
    """Remember the session's login token in the module-level cache."""
    _local_data["login_token"] = token
def load_data():
global _local_data
if(os.path.exists(os.path.join(get_current_path(),DATAFILENAME))):
with open(os.path.join(ge... | save_data():
with open(os.path.join(get_current_path(),DATAFILENAME), 'w') as f:
f.write(json.dumps(_local_data))
def get_user_id():
    """Return the cached user id, or None when no user has been stored."""
    return _local_data.get("user_id", None)
def get_login_token():
    """Return the cached login token, or None when no token has been stored."""
    return _local_data.get("login_token", None)
def get_template_path():
return os.path.join(get_current_pat... |
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup ------------------------------------------------------------... | ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, | category)
texinfo_documents = [
(master_doc, 'msgiver', 'msgiver Documentation',
author, 'msgiver', 'One line description of project.',
'Miscellaneous'),
] |
from __future__ import absolute_import, division, | print_function, unicode_literals
from django.contrib.auth.decorators import user_passes_test
from django_otp import user_has_device
from django_otp.conf import settings
def otp_required(view=None, redirect_field_name='next', login_url=None, if_configured=False):
"""
Similar to :func:` | ~django.contrib.auth.decorators.login_required`, but
requires the user to be :term:`verified`. By default, this redirects users
to :setting:`OTP_LOGIN_URL`.
:param if_configured: If ``True``, an authenticated user with no confirmed
OTP devices will be allowed. Default is ``False``.
:type if_con... |
#
import common
__version__ = common.version
d | el common
|
import settings
|
#!/usr/bin/env python3
import os
import logging
from datetime import datetime
from settings import JSONDIR
from defs import load_data
from san_env import get_apps
debug_records_flag = False
def save(appname, relations):
apps = get_apps()
for filename, modelname, filters in relations:
records = load_... | (**record) for record in records])
else:
for record in records:
try:
| model(**record).save()
except:
print('== {} =='.format(modelname))
for key in sorted(record.keys()):
print(key, record[key] if key in record else '')
print('\n')
logging.info('--- file: %s -> model: %s | %s recor... |
totalF1 = 0
for r in range(nr):
startRow, endRow = X.indptr[r], X.indptr[r+1]
xInds = X.indices[startRow:endRow]
xVals = X.data[startRow:endRow]
rowLen = endRow - startRow
scores = np.zeros(nl)
for (ind, val) in zip(xInds, xVals):
weightVals ... | p.open(dataFilename)
else:
datafile = open(dataFilename)
nr = 0
numVals = 0
numLabVals = 0
keeperCounter = 0
featCounts = {}
line_process_counter = 0
for line in datafi | le:
line_process_counter += 1
if line_process_counter % 100 == 0:
print "pass 1 of 4: " + str(line_process_counter)
keeperCounter += trainFrac
if keeperCounter < 1:
continue
else:
keeperCounter -= 1
splitLine = line.split('... |
#!/usr/bin/env python3
from anormbookmarker.test.test_enviroment import *
with self_contained_session(CONFIG.database_timestamp) as session:
BASE.metadata.create_all(session.bind)
# make a tag to make an alias to
aa = T | ag.construct(session=session, tag='a a')
session.commit()
db_result = [('select COUNT(*) from alias;', 0),
('sel | ect COUNT(*) from aliasword;', 0),
('select COUNT(*) from bookmark;', 0),
('select COUNT(*) from filename;', 0),
('select COUNT(*) from tag;', 1),
('select COUNT(*) from tag_relationship;', 0),
('select COUNT(*) from tagbookmarks;', 0),
('sel... |
eference
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.reftest(k)
val = kret.getValue()
self.verifyValue("me oh my reftest", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# pass by pointer reference
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_b... |
self.verifyValue("me oh my derivedsmartptrpointerreftest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# derived pass by pointer
k = li_boost_shared_ptr.KlassDeriv | ed("me oh my")
kret = li_boost_shared_ptr.derivedpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my derivedpointertest-Derived", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# derived pass by ref
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shar... |
# -*- coding: utf-8 -*-
# See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = "sale.advance.payment.inv"
@api.model
def _get_advance_payment(self):
ctx = self.env.context.copy()
if ... | 'Invoiceable lines\
(deduct down payments)'),
| ('percentage',
'Down payment (percentage)'),
('fixed',
'Down payment (fixed\
amoun... |
import time
import json
import random
from flask import Flask, request, current_app, abort
from functools import wraps
from cloudbrain.utils.metadata_info import (map_metric_name_to_num_channels,
get_supported_devices,
get_metrics_... | ied output for JSONP"""
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f()) + ')'
return current_app.response_class(content,
... |
@app.route('/data', methods=['GET'])
@support_jsonp
def data():
"""
GET metric data
:return:
"""
# return last 5 microseconds if start not specified.
default_start_timestamp = int(time.time() * 1000000 - 5)
device_id = request.args.get('device_id', None)
device_name = request.args.g... |
import sys
import re
# Copyright
# =========
# Copyright (C) 2015 Trustwave Holdings, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your optio... | ogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this ... | pt that decrypts the encoded config files
# for Cherry Picker malware. It is encoded with a XOR string
#
# Input: filename or none to use the default kb852310.dll filename
#
# Example: python cherryPicker.py
#
# Example: python cherryPicker.py filename.dll
#
# Output: config.bin (decrypted config file)
xor_key = ['0... |
from operator import itemgetter
__author__ = 'davide'
def pairwise(l):
    """Return an iterator of consecutive overlapping pairs of l:
    (l[0], l[1]), (l[1], l[2]), ...

    Empty when l has fewer than two elements.
    """
    # zip stops at the shorter argument, i.e. at the last complete pair,
    # so the manual `for ...: yield` loop is unnecessary.
    return zip(l, l[1:])
def pijavskij(f, L, a, b, eps=1E-5):
l = [(a, f(a)), (b, f(b))]
while True:
imin, Rmin, xmin = -1, float("inf"), -1
fo | r i, t in enumerate(pairwise(l)):
(xi, fi), (xj, fj) = t
R = (fi + fj - L * (xj - xi)) / 2
if R < Rmin:
imin = i
|
Rmin = R
xmin = (xi + xj) / 2 - (fj - fi) / (2 * L)
if l[imin + 1][0] - l[imin][0] < eps:
return l[imin], l[imin + 1]
l.append((xmin, f(xmin)))
l.sort(key=itemgetter(0))
print(l)
if __name__ == "__main__":
f = lambda x: x ** 4
... |
ibrary build action
act = SCons.Action.Action(BuildLibInstallAction, 'Install compiled library... $TARGET')
bld = Builder(action = act)
Env.Append(BUILDERS = {'BuildLib': bld})
# parse rtconfig.h to get used component
PreProcessor = SCons.cpp.PreProcessor()
f = file('rtconfig.h', 'r')
conte... | if src_group.has_key('CPPPATH'):
src_group['CPPPATH'] = src_group[ | 'CPPPATH'] + group['CPPPATH']
else:
src_group['CPPPATH'] = group['CPPPATH']
if group.has_key('CPPDEFINES'):
if src_group.has_key('CPPDEFINES'):
src_group['CPPDEFINES'] = src_group['CPPDEFINES'] + group['CPPDEFINES']
else:
src_group['CPPDEFINES'] = group['C... |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish... | RACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.IEC61970.Wires.Conductor import Conductor
class DCLineSegment(Conductor):
"""A wire or combination of wires not insulated from one another, with consistent electrical chara... | tion of wires not insulated from one another, with consistent electrical characteristics, used to carry direct current between points in the DC region of the power system.
"""
def __init__(self, dcSegmentInductance=0.0, dcSegmentResistance=0.0, *args, **kw_args):
"""Initialises a new 'DCLineSegment' in... |
doLayout(sections16, sections32seg, sections32flat):
# Determine 16bit positions
textsections = getSectionsPrefix(sections16, '.text.')
rod | atasections = (getSectionsPrefix(sections16, '.rodata.str1.1')
+ getSectionsPrefix(sections16, '.rodata.__func__.'))
datasections = getSectionsPrefix(sections16, '.data16.')
fixedsections = getSectionsPrefix(sections16, '.fixedaddr.')
locs16fixed, firstfixed = fitSections(fixedsection... | i in textsections+rodatasections+datasections
if i not in prunesections]
locs16, code16_start = getSectionsStart(remsections, firstfixed)
locs16 = locs16 + locs16fixed
locs16.sort()
# Determine 32seg positions
textsections = getSectionsPrefix(sections32seg, '.text.')
rodatase... |
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 23 11:27:24 2017
@author: hd_mysky
"""
import pymongo as mongo
import pandas as pd
import os
from transform import harmonize_data
BASE_DIR = os.path.dirname(__file__) #获取当前文件的父目录绝对路径
file_path = os.path.join(BASE_DIR,'dataset','source_simple.csv')
conn = mongo.MongoCl... | _data(train)
train_data.to_csv(file_path)
print('————— | —————数据转换成功——————————')
|
from __future__ impo | rt absolute_import
import unittest
import deviantart
from .helpers import mock_response, optional
from .api_credentials import CLIENT_ID, CLIENT_SECRET
class ApiTest(unittest.TestCase):
@optional(CLIENT_ID == "", mock_response('token'))
def setUp(self):
self.da = deviantart.Api(CLIENT_ID, CLIENT_SEC... | ", user.username)
self.assertEqual("devart", repr(user))
@optional(CLIENT_ID == "", mock_response('deviation'))
def test_get_deviation(self):
deviation = self.da.get_deviation("234546F5-C9D1-A9B1-D823-47C4E3D2DB95")
self.assertEqual("234546F5-C9D1-A9B1-D823-47C4E3D2DB95", deviation.devi... |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, ... | R CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def testFunction(request):
    """Handler used by the deployment test: ignores `request`, returns "PASS"."""
    # The argument is unused; the literal return value signals success.
    status = "PASS"
    return status
import os
# os.environ["FOO"] is only available | at runtime.
print(os.environ["FOO"])
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsImageCache.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__a... | " % QDir.tempPath()
with open(report_file_path, 'a') as report_file:
report_file.write(self.report)
    def imageFetched(self):
        """Mark that the remote image has arrived (flag polled by waitForFetch)."""
        self.fetched = True
def waitForFetch(self):
self.fetched = False
while not self.fetched:
QCoreApplication.processEvents()
def testRemoteImage(self):
"""Test fetching remote image."""
url = 'http://localhost:{}/qgis_local_server/sample_image.png'.format(str(Te... |
"""
TwoDWalker.py is for controling the avatars in a 2D Scroller game environment.
"""
from GravityWalker import *
from panda3d.core import ConfigVariableBool
class TwoDWalker(GravityWalker):
"""
The TwoDWalker is primarily for a 2D Scroller game environment. Eg - Toon Blitz minigame.
TODO: This class is... | the arrow keys and update the avatar.
"""
# get the button states:
jump = inputState.isSet("forward")
if self.lifter.isOnGround():
if self.isAirborne:
self.isAirborne = 0
assert self.debugPrint("isAirborne | 0 due to isOnGround() true")
impact = self.lifter.getImpactVelocity()
messenger.send("jumpLand")
assert self.isAirborne == 0
self.priorParent = Vec3.zero()
else:
if self.isAirborne == 0:
assert self.debugPrint("isAirborne 1 due ... |
ForSpelling = str.maketrans({
'ſ': 's', 'ffi': 'ffi', 'ffl': 'ffl', 'ff': 'ff', 'ſt': 'ft', 'fi': 'fi', 'fl': 'fl', 'st': 'st'
})
def spellingNormalization(sWord):
    """Replace typographic ligatures/long-s via the module translation table,
    then return the NFC-normalized result."""
    sFlat = sWord.translate(_xTransCharsForSpelling)
    return unicodedata.normalize("NFC", sFlat)
_xTransCharsForSimplification = str.maketrans({
'à': 'a', 'é': ... | "cu", "kk", "kh"),
"K": ("QU", "CK", "CH", "CU", "KK", "KH"),
"l": ("ll",),
"L": ("LL",),
"m": ("mm", "mn"),
"M": ("MM", "MN"),
"n": ("nn", "nm", "mn"),
"N": ("NN", "NM", "MN"),
"o | ": ("au", "eau"),
"O": ("AU", "EAU"),
"œ": ("oe", "eu"),
"Œ": ("OE", "EU"),
"p": ("pp", "ph"),
"P": ("PP", "PH"),
"q": ("qu", "ch", "cq", "ck", "kk"),
"Q": ("QU", "CH", "CQ", "CK", "KK"),
"r": ("rr",),
"R": ("RR",),
"s": ("ss", "sh"),
"S": ("SS", "SH"),
"t": ("tt", "th"),... |
#! /usr/bin/python2
# vim: set fileencoding=utf-8
from dateutil.parser import parse
from subprocess import check_output
from shutil import copy
import datetime
import sys
import os.path
import isoweek
DATE_FORMAT = '%Y%m%d'
START = """\documentclass[a4paper,oneside,draft,
notitlepage,11pt,svgnames]{scrreprt}
\\newcomma... | ment}"""
MD_ACTIVITY = """# Activity {.unnumbered}
~~~~
"""
def create(date):
filename = date.strftime(DATE_FORMAT)
month = date.strftime('%B')
day = date.strftime('%d')
with open('template.tex', 'r') as t:
content = t.read()
content = content.replace('MONTH', month)
content =... | py('content.md', filename+'.md')
print('gvim {}'.format(filename+'.md'))
def week(date):
    """Bundle the seven daily entries of `date`'s ISO week into w<num>.tex."""
    iso_week = isoweek.Week.withdate(date)
    tex_name = 'w{}.tex'.format(iso_week.week)
    together([iso_week.day(d) for d in range(7)], tex_name)
def together(dates, name):
include = '\chapter{{{}}}\n\input{{{}}}'
res = [include.fo... |
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common.arg_spec import ArgumentSpecVa... | t',
{'users': {'type': 'dict'}},
{'users': ['one', 'two']},
{'users': ['one', 'two']},
set(),
"unable to convert to dict: <class 'list'> cannot be converted to a dict",
),
(
'invalid-bool',
{'bool': {'type': 'bool'}},
{'bool': {'k': 'v'}},
... | type': 'float'}},
{'float': 'hello'},
{'float': 'hello'},
set(),
"unable to convert to float: <class 'str'> cannot be converted to a float",
),
(
'invalid-bytes',
{'bytes': {'type': 'bytes'}},
{'bytes': 'one'},
{'bytes': 'one'},
set(),
... |
# -*- coding: utf-8 -*-
VSVersionInfo(
ffi=FixedFileInfo(
filevers=(4, 0, 0, 0),
prodvers=(4, 0, 0, 0),
mask=0x3f,
flags=0x0,
OS=0x4,
fileType=0x1,
subtype=0x0,
date=(0, 0)
),
kids=[
StringFileInfo(
[
StringT... |
[StringStruct('CompanyName', u'CommandBrain'),
StringStruct('FileDescription', u'Programm for create Usecase | diagram'),
StringStruct('FileVersion', '1.0'),
StringStruct('LegalCopyright', u'CommandBrain'),
])
]),
VarFileInfo([VarStruct('Translation', [1033, 1200])])
]
)
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, SylvainCecchetto
# GNU General Public License v2.0+ (see LICENSE.txt or https | ://www.gnu.org/licenses/gpl-2.0.txt)
# This file is part of Catch-up TV & More
from __future__ import unicode_literals
import re
from codequick import Resolver
import urlquick
from resources.lib import resolver_proxy
# TO DO
# Add Replay
URL_LIVE = 'https://www.paramountchannel.it/tv/diretta'
@Resolver.registe... | URL_LIVE, max_age=-1)
video_uri = re.compile(r'uri\"\:\"(.*?)\"').findall(resp.text)[0]
account_override = 'intl.mtvi.com'
ep = 'be84d1a2'
return resolver_proxy.get_mtvnservices_stream(
plugin, video_uri, False, account_override, ep)
|
#!/usr/bin/env python
"""tvnamer - Automagical TV episode renamer
Uses data from www.t | hetvdb.com (v | ia tvdb_api) to rename TV episode files from
"some.show.name.s01e01.blah.avi" to "Some Show Name - [01x01] - The First.avi"
"""
__version__ = "3.0.0"
__author__ = "dbr/Ben"
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import math
import torch
import to... | e, optional): A closure that reevaluates the model
and returns the loss.
"""
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
for p in group['params']:
if p.grad is None:
cont... | sider SparseAdam instead')
amsgrad = group['amsgrad']
state = self.state[p]
# State initialization
if len(state) == 0:
state['step'] = 0
# Exponential moving average of gradient values
state['ex... |
import logging
from typing import List
import numpy as np
import torch
import torch.nn as nn
from pinta.model.model_base import NN
LOG = logging.getLogger("ConvRNN")
class ConvRNN(NN):
"""
Combination of a convolutional front end and an RNN (GRU) layer below
>> see https://gist.github.com/spro/c87cc706... | idden_size, n_gru_layers, dropout=0.01, batch_first=True
)
| # Ends with a fully connected layer
self.out = nn.Linear(hidden_size, self.output_size)
# Load from trained NN if required
if filename is not None:
self._valid = self.load(filename)
if self._valid:
return
LOG.warning("Could not load t... |
from | .utils | import Impl
|
import os
import threading
import Queue
# Windows import
import win32file
import win32pipe
import win32api
import win32con
import win32security
import win32process
import win32event
class Win32Spawn(object):
def __init__(self, cmd, shell=False):
self.queue = Queue.Queue()
self.is_terminated = Fal... | win32file.ReadFile(handle, bytesToRead, None)
if data:
self.queue.put_nowait(data)
except win32api.error:
finished = 1
if finished:
return
def start_pipe(self):
def worker(pipe):
return pipe.w | ait()
thrd = threading.Thread(target=worker, args=(self, ))
thrd.start()
|
# -*- coding: utf-8 -*-
import pytest
from thefuck.shells.tcsh import Tcsh
@pytest | .mark.usefixtures('isfile', 'no_memoize', 'no_cache')
class TestTcsh(object):
    @pytest.fixture
    def shell(self):
        """Provide a fresh Tcsh shell object to each test."""
        return Tcsh()
@pytest.fixture(autouse=True)
def Popen(self, mocker):
mock = mock | er.patch('thefuck.shells.tcsh.Popen')
mock.return_value.stdout.read.return_value = (
b'fuck\teval $(thefuck $(fc -ln -1))\n'
b'l\tls -CF\n'
b'la\tls -A\n'
b'll\tls -alF')
return mock
@pytest.mark.parametrize('before, after', [
('pwd', 'pwd'),
... |
# coding=utf-8
# Copyright 2022 The ML Fairness Gym Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicab... | """Raises ValueError if the value does not sum to one."""
| del instance, attribute # Unused.
value = np.array(value)
if not np.isclose(np.sum(value), 1):
raise ValueError("Array must sum to one. Got %s." % np.sum(value))
def _check_nonnegative(instance, attribute, value):
"""Raises ValueError if the value elements are negative."""
del instance, attribute # Un... |
from collections import defaultdict
class Solution(object):
def removeBoxes(self, boxes):
"""
:type boxes: List[int]
:rtype: int
"""
unq, cnt = [], []
for b in boxes:
if not unq or b != unq[-1]:
unq.append(b)
cnt.append(1)... | merate(unq):
pre[b].append(i)
dp[i][i] = cnt[i] ** 2
for j in range(i - 1, -1, -1):
theMax = dp[i - 1][j] + cnt[i] ** 2
npre = len(pre[b]) if unq[j] != unq[i] else len(pre[b]) - 1
for kk in range(npre - 1, -1, -1):
... | k] + dp[k - 1][j])
else:
break
if unq[j] == unq[i]:
poss = pre[b][kk:]
nposs = len(poss)
span = []
for p in range(nposs - 1):
span.append(dp[poss[p + 1] - ... |
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2013 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo (info@vauxoo.com)
####################################... | neral Public License for more details.
#
# You should have rec | eived a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
class res_partner(osv.Model):
_inherit = 'res.partner'
_order =... |
s is None:
self.transportModel = self.transportFunction()
else:
# order for reproductibility
self.params = sorted(self.params.items())
# if grid search
if self.isGridSearch():
# compute combinaison for each param
self.f... | ransportModel.fit(Xs, ys=ys, Xt=Xt, yt=yt)
if self.feedback:
pushFeedback(20, feedback=self.feedback)
return self.transportModel
def predictTransfer(self, imageSource, outRaster, mask=None,
NODATA=-9999, feedback=None | , norm=False):
"""
Predict model using domain adaptation.
Parameters
----------
model : object
Model generated from learnTransfer function.
imageSource : str
Path of image to adapt (source image)
outRaster : str
Path of tiff im... |
from datetime import timedelta
from django.conf import settings
from django.utils.timezone import now
from rest_framework import status, pagination
from rest_framework.generics import CreateAPIView, DestroyAPIView, ListAPIView
from rest_framework.response import Response
from churchill.api.v1.shots.serializers import... | ion):
page_size = settin | gs.CALENDAR_WEEK_SIZE * 7
class ShotsItemCalendarView(ListAPIView):
serializer_class = ShotDateSerializer
pagination_class = CalendarPagination
def get_queryset(self):
weeks_offset = int(self.request.query_params.get("weeks_offset", 0))
return get_shots_calendar(self.request.user, weeks_... |
"""Thetests for the Modbus sensor component."""
import pytest
from homeassistant.components.binary_sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.modbus.const import (
CALL_TYPE_COIL,
CALL_TYPE_DISCRETE,
CONF_INPUT_TYPE,
CONF_LAZY_ERROR,
CONF_SLAVE_COUNT,
)
from homeassistant.c... | 51,
C | ONF_INPUT_TYPE: CALL_TYPE_COIL,
CONF_SCAN_INTERVAL: 10,
CONF_LAZY_ERROR: 2,
},
],
},
],
)
@pytest.mark.parametrize(
"register_words,do_exception,start_expect,end_expect",
[
(
[0x00],
True,
... |
ate.
# If we ask for the window state here it will never realize that
# we have been maximized because the window state change is processed
# after the resize event. Using a timer event causes it to happen
# after all the events have been processsed.
size = event.size()
... | state['y'])
except:
result = False
try:
if state['maximized']:
self.showMaximized()
try:
self._unmaximized_size = (state['width'], state['height'])
| except:
pass
except:
result = False
self._apply_state_applied = result
return result
def export_state(self):
"""Exports data for view save/restore"""
state = self.windowState()
maximized = bool(state & Qt.WindowMaximized)
... |
"""Support for Z-Wave fans."""
import math
from homeassistant.components.fan import (
DOMAIN as FAN_DOMAIN,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util.percentage import (
percentag... | evice on."""
await self.async_set_percentage(percentage) |
    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        # Speed 0 stops the fan; is_on reads this same primary value back,
        # so the entity reports "off" once the node echoes the new speed.
        # **kwargs absorbs extra service-call arguments (unused here).
        self.values.primary.send_value(0)
@property
def is_on(self):
"""Return true if device is on (speed above 0)."""
return self.values.primary.value > 0
@property
def percentage(self):
"""Ret... |
s_global', is_flag=True,
help="Show global metadata instead of local")
@click.option('--filter', metavar='<filter>',
help='Filter for name, description, blob, global,'
' networkNotificationDisabled, ID, externalID')
@click.pass_context
def metadata_list(ctx, filter, entity... | %s" % metadata_id)
exit(1)
params = {'metadataTagIDs': []}
change = False
for t in existing_tag:
if t not in tag:
para | ms['metadataTagIDs'].append(t)
else:
change = True
if not change:
print("Warning: none of given tag exists in metadata %s" % metadata_id)
exit(1)
ctx.obj['nc'].put(request, params)
result = ctx.obj['nc'].get(request)[0]
print_object(result, only=ctx.obj['show_only'], ... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ***** BEGIN LICENSE BLOCK *****
# Copyright (C) 2012-2014, Hayaki Saito
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restric... |
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or ... | E SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHE... |
,
'airflow.providers.google.cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor',
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocScaleClusterOperator',
'airflow.providers.google.cloud.opera... | rators.dlp.CloudDLPReidentifyContentOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDLPJobOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPU | pdateDeidentifyTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator',
... |
# -*- coding: utf-8 - | *-
# Generated by Django 1.11 | .7 on 2018-01-04 10:49
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('polls', '0011_remove_vote_endorse'),
]
operations = [
migrations.RenameField(
model_name='simplevote',
old_na... |
# http://www.k12reader.com/dolch-word-list-sorted-alphabetically-by-grade-with-nouns/
f = open("data1.txt")
header = f.readline()
from collections import OrderedDict
database = OrderedDict()
for item in header.split():
database[item] = []
for line in f.readlines():
items = line.rstrip().split('\t')
... | two colums for nouns
| # And we collapsed into one
if index > 5:
index = 5
category = database.keys()[index]
database[category].append(item)
|
# -*- coding: utf-8 -*-
import numpy as np
import pytest
import pyls
@pytest.fixture(scope='session')
def testdir(tmpdir_factory):
    """Path (as a plain str) of a session-scoped temporary data directory."""
    return str(tmpdir_factory.mktemp('data'))
@pytest.fixture(scope='session')
def mpls_results():
    """Session-cached mean-centered PLS decomposition of random data.

    100 subjects x 1000 features, two conditions, with a fixed seed so every
    test in the session sees the identical result object.
    """
    n_subj, n_feat = 100, 1000
    rng = np.random.RandomState(1234)
    data = rng.rand(n_subj, n_feat)
    return pyls.meancentered_pls(data, n_cond=2, n_perm=10,
                                 n_boot=10, n_split=10)
@pytest.fixture(scope='session')
def bpls_results():
Xf = 1000
Yf = 100
subj = 100
rs = np.random.RandomState(1234)
return pyls.behavioral_pls(rs.rand(subj, Xf), rs.rand(subj, Yf),
n_perm=10, n_boot=... |
from mcpi.minecraft i | mport Minecraft
from time import sleep
mc = Minecraft.create()
class mic:
x=0
y=0
z=0
u=1
def usid(self):
t=mc.getPlayerEntityIds()
print t
def uspos(self,wkj):
self.x,self.y,se | lf.z = mc.entity.getPos(wkj)
print self.x,self.y,self.z
def wdfe(self,item):
mc.setBlock(self.x,self.y,self.z, item)
def tnt(self,item):
mc.setBlock(self.x,self.y,self.z, item,1)
# Demo driver: list the player entity ids, cache entity 1's position, then
# place block id 46 (TNT -- presumably; verify against the block id table)
# at that spot.
s=mic()
s.usid()
#s.uspos(57369)
s.uspos(1)
s.wdfe(46)
#s.uspos(20514)
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure function-level linking setting is extracted properly.
"""
import TestGyp
import sys
if sys.platform == 'win32... | String(binary, search_for, should_exist):
output = test.run_dumpbin('/headers', binary)
if should_exist and search_for not in output:
print 'Did not find "%s" in %s' % (search_for, binary)
test.fail_test()
elif not should_exist and search_for in output:
print 'Found "%s" in %s (and s... | rmat == 'ninja' else '\\'
return 'obj\\%s%s%s' % (proj, sep, obj)
look_for = '''COMDAT; sym= "int __cdecl comdat_function'''
# When function level linking is on, the functions should be listed as
# separate comdat entries.
CheckForSectionString(
test.built_file_path(Object('test_fll_on', '... |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-01-30 14:30
from __future__ import unicode_liter | als
import enum
from django.db import migrations
import enumfields.fields
class TrxType(enum.Enum):
    """Wallet transaction type.

    NOTE(review): presumably a frozen copy of the app's enum at migration
    time, so later model changes do not alter this migration's meaning.
    """
    FINALIZED = 0
    PENDING = 1
    CANCELLATION = 2
class TrxStatus(enum.Enum):
    """Wallet transaction status; FINALIZED (1) is the column default used
    by this migration's AlterField operation."""
    PENDING = 0
    FINALIZED = 1
    REJECTED = 2
    CANCELED = 3
|
class Migration(migrations.Migration):
dependencies = [
('wallet', '0005_auto_20160309_1722'),
]
operations = [
migrations.AlterField(
model_name='wallettransaction',
name='trx_status',
field=enumfields.fields.EnumIntegerField(default=1, enum=TrxStatus... |
# C | opyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by app... | under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Res... |
# TODO inspect for Cython (see sagenb.misc.sageinspect)
from __future__ import print_function
from nose.plugins.skip import SkipTest
from nose.tools import assert_true
from os import path as op
import sys
import inspect
import warnings
import imp
from pkgutil import walk_packages
from inspect import getsource
import... |
parts.append(func.im_class.__name__)
parts.append(func.__name__)
return '.'.join(parts)
# functions to ignore args / docstring of
_docstring_ignores = [
'mne.io.write', # always ignore these
'mne.fixes._in1d', # fix function
'mne.epochs.average_movements', # deprecated pos param
]
_ta... | ta has a tab
]
def check_parameters_match(func, doc=None):
"""Helper to check docstring, returns list of incorrect results"""
incorrect = []
name_ = get_name(func)
if not name_.startswith('mne.') or name_.startswith('mne.externals'):
return incorrect
if inspect.isdatadescriptor(func):
... |
ayers.core import Dense, Activation, Dropout
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM, GRU
from keras.layers.normalization import BatchNormalization
from keras.utils import np_utils
from keras.layers import Merge
from keras.layers import TimeDistributed, Lambda
from keras.la... | ch helps prevent overfitting.'''
model3.add(Dropout(0.2))
model3.add(Con | volution1D(nb_filter=nb_filter,
filter_length=filter_length,
border_mode='valid',
activation='relu',
subsample_length=1))
model3.add(GlobalMaxPooling1D())
model3.add(Dropout(0.2))
model3.add(Dense(300))
model3.add(Drop... |
#!/usr/bin/env python
# coding=utf-8
"""288. An enormous factorial
https://projecteuler.net/problem=288
For any prime p the number N(p,q) is defined by N(p,q) = sum_{n=0..q} T_n * p^n
with T_n generated by the following random number generator:
S_0 = 290797
S_{n+1} = S_n^2 mod 50515093
T_n = S_n mod p
Let Nfac(p,q) be the factorial of N(p,q).
Let NF(p,q) be the number of factors p in Nfac(p,q).
You are given that NF(3,10^4) mod 3^20 = 624955285.
Find NF(61,10^7) mod 61^10.
"""
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.