Dataset schema (one row per source file):

| Column | Type | Values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 6 .. 782k |
| ext | string | 7 classes |
| lang | string | 1 value |
| max_stars_repo_path | string | length 4 .. 237 |
| max_stars_repo_name | string | length 6 .. 72 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | license identifiers |
| max_stars_count | int64, nullable | 1 .. 53k |
| max_stars_repo_stars_event_min_datetime | string, nullable | length 24 |
| max_stars_repo_stars_event_max_datetime | string, nullable | length 24 |
| max_issues_repo_path | string | length 4 .. 184 |
| max_issues_repo_name | string | length 6 .. 72 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | license identifiers |
| max_issues_count | int64, nullable | 1 .. 27.1k |
| max_issues_repo_issues_event_min_datetime | string, nullable | length 24 |
| max_issues_repo_issues_event_max_datetime | string, nullable | length 24 |
| max_forks_repo_path | string | length 4 .. 184 |
| max_forks_repo_name | string | length 6 .. 72 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | license identifiers |
| max_forks_count | int64, nullable | 1 .. 12.2k |
| max_forks_repo_forks_event_min_datetime | string, nullable | length 24 |
| max_forks_repo_forks_event_max_datetime | string, nullable | length 24 |
| content | string | length 6 .. 782k |
| avg_line_length | float64 | 2.75 .. 664k |
| max_line_length | int64 | 5 .. 782k |
| alphanum_fraction | float64 | 0 .. 1 |
hexsha: 7fc1eaf126fdc8571f5a1aa0495912c04e0c74d3 | size: 2,893 | ext: py | lang: Python
path: src/ztc/nginx/timelog.py | repo: magistersart/ZTC_fork | head: ce72734ea575d9846b5b81f3efbfd14fa1f7e532 | licenses: ["PostgreSQL"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: null | max_forks_count: null
#!/usr/bin/env python
"""
Nginx TimeLog check class: calculates min/avg/max upstream response times.
Usage:
1. configure time_log log format in nginx config (http section):
log_format time_log '$upstream_response_time $request <anything you want>';
2. Add timelog log to all servers/locations you need to monitor:
access_log /var/log/nginx/time.log time_log;
3. Check log path on config file /etc/ztc/nginx.conf
4. Make sure zabbix can execute nginx_reqtime.py script under root (to allow
cleaning of the log)
5. It might be a good idea to place this log on tmpfs.
This file is part of ZTC and distributed under the same license.
http://bitbucket.org/rvs/ztc/
Copyright (c) 2011 Vladimir Rusinov <[email protected]>
"""
from ztc.check import ZTCCheck
from ztc.store import ZTCStore
class NginxTimeLog(ZTCCheck):
    """ Nginx upstream response min/avg/max calculation """
name = 'nginx'
OPTPARSE_MIN_NUMBER_OF_ARGS = 1
OPTPARSE_MAX_NUMBER_OF_ARGS = 1
def _get(self, metric=None, *args, **kwargs):
return self.get_resptime(metric)
def get_resptime(self, metric):
""" get min/avg/max response time """
data = None
if metric != 'avg':
data = self.read_from_store()
if not data:
data = self.read_timelog()
self.save_to_store(data)
return data[metric]
    def read_timelog(self):
        """ Open the time log, compute min/avg/max response times, and truncate the file """
mn = -1.0
mx = -1.0
avg = 0.0
n = 0
fn = self.config.get('timelog', '/var/log/nginx/time.log')
try:
f = open(fn, 'a+')
for l in f.readlines():
if l.startswith('-'):
# skip non-upstream lines with no $upstream_response_time
continue
r = l.split()[0] # response time should be in first col
r = float(r)
if mn < 0:
mn = r
else:
mn = min(r, mn)
mx = max(r, mx)
self.logger.debug("step %i: avg=%.2f, max=%.2f, min=%.2f" %
(n, avg, mx, mn))
avg += r
n += 1
f.truncate(0)
f.close()
except IOError:
self.logger.exception("I/O error on time log")
if n > 0:
avg = avg / n
else:
            self.logger.warn('there were no new records in the time log')
# set mn, mx = 0 if no avg data present
mn = max(0, mn)
mx = max(0, mx)
return {'min': mn,
'max': mx,
'avg': avg}
def save_to_store(self, data):
st = ZTCStore('nginx_reqtime', self.options)
st.set(data)
def read_from_store(self):
st = ZTCStore('nginx_reqtime', self.options)
return st.get()
avg_line_length: 28.93 | max_line_length: 77 | alphanum_fraction: 0.555133
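The read_timelog logic above reduces to a single pass that skips '-' lines, parses the first column as a float, and tracks min/max/sum. A minimal standalone sketch of that aggregation, using hypothetical time_log lines (not taken from the original repository):

```python
# Illustrative sketch (not part of the dataset row): the same min/avg/max
# aggregation as NginxTimeLog.read_timelog, run on in-memory sample lines.
sample_lines = [
    "0.050 GET /index.html",    # hypothetical time_log entries
    "- GET /static/logo.png",   # '-' means no upstream was involved; skipped
    "0.210 GET /api/v1/items",
]

def aggregate(lines):
    times = [float(line.split()[0]) for line in lines if not line.startswith('-')]
    if not times:
        return {'min': 0, 'max': 0, 'avg': 0.0}
    return {'min': min(times), 'max': max(times), 'avg': sum(times) / len(times)}

print(aggregate(sample_lines))  # {'min': 0.05, 'max': 0.21, 'avg': 0.13}
```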
hexsha: 3d012978fab5574b4b0eaed49cd7643bab61e117 | size: 851 | ext: py | lang: Python
path: frappe-bench/apps/erpnext/erpnext/patches/v9_0/set_schedule_date_for_material_request_and_purchase_order.py | repo: Semicheche/foa_frappe_docker | head: a186b65d5e807dd4caf049e8aeb3620a799c1225 | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 1 (2021-04-29T14:55:29.000Z .. 2021-04-29T14:55:29.000Z) | max_issues_count: null | max_forks_count: 1 (2021-04-29T14:39:01.000Z .. 2021-04-29T14:39:01.000Z)
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for doctype in ("Material Request", "Purchase Order"):
frappe.reload_doctype(doctype)
frappe.reload_doctype(doctype + " Item")
if not frappe.db.has_column(doctype, "schedule_date"):
continue
        # Update only submitted documents (docstatus == 1)
for record in frappe.get_all(doctype, filters= [["docstatus", "=", 1]], fields=["name"]):
doc = frappe.get_doc(doctype, record)
if doc.items:
if not doc.schedule_date:
schedule_dates = [d.schedule_date for d in doc.items if d.schedule_date]
if len(schedule_dates) > 0:
min_schedule_date = min(schedule_dates)
frappe.db.set_value(doctype, record,
"schedule_date", min_schedule_date, update_modified=False)
avg_line_length: 35.458333 | max_line_length: 91 | alphanum_fraction: 0.726204
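The patch's core decision is picking the earliest schedule_date among a document's items and writing it back to the parent. A small self-contained illustration of that selection with hypothetical item rows (no Frappe required):

```python
# Illustrative sketch: earliest schedule_date among child items (hypothetical data).
import datetime

items = [
    {"item_code": "A", "schedule_date": datetime.date(2017, 5, 3)},
    {"item_code": "B", "schedule_date": None},   # unset dates are ignored
    {"item_code": "C", "schedule_date": datetime.date(2017, 4, 21)},
]

schedule_dates = [d["schedule_date"] for d in items if d["schedule_date"]]
if schedule_dates:
    print(min(schedule_dates))  # 2017-04-21, the value written back to the parent document
```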
hexsha: 3d4551b855902436177ea00924846cf8238891dd | size: 3,201 | ext: py | lang: Python
path: Project Euler Questions 1 - 10/Project Euler Question 8.py | repo: Clayton-Threm/Coding-Practice | head: 6671e8a15f9e797338caa617dae45093f4157bc1 | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 1 (2020-02-11T02:03:02.000Z .. 2020-02-11T02:03:02.000Z) | max_issues_count: null | max_forks_count: null
#Project Euler Question 8
#The four adjacent digits in the 1000-digit number that have the greatest product are 9 × 9 × 8 × 9 = 5832.
#73167176531330624919225119674426574742355349194934
#96983520312774506326239578318016984801869478851843
#85861560789112949495459501737958331952853208805511
#12540698747158523863050715693290963295227443043557
#66896648950445244523161731856403098711121722383113
#62229893423380308135336276614282806444486645238749
#30358907296290491560440772390713810515859307960866
#70172427121883998797908792274921901699720888093776
#65727333001053367881220235421809751254540594752243
#52584907711670556013604839586446706324415722155397
#53697817977846174064955149290862569321978468622482
#83972241375657056057490261407972968652414535100474
#82166370484403199890008895243450658541227588666881
#16427171479924442928230863465674813919123162824586
#17866458359124566529476545682848912883142607690042
#24219022671055626321111109370544217506941658960408
#07198403850962455444362981230987879927244284909188
#84580156166097919133875499200524063689912560717606
#05886116467109405077541002256983155200055935729725
#71636269561882670428252483600823257530420752963450
#Find the thirteen adjacent digits in the 1000-digit number that have the greatest product. What is the value of this product?
def adjacent(x):
long_number = 7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450
long_number_list = []
long_number_list[:] = str(long_number)
result = 1
highest_adjacent = 0
min_range = 0
max_range = x
long_number_list = [int(i) for i in long_number_list]
for num in long_number_list:
adjacent_check = (long_number_list[min_range:max_range])
if 0 not in adjacent_check:
for check in adjacent_check:
result = result * (check)
if (result > highest_adjacent):
highest_adjacent = result
highest_list = adjacent_check.copy()
adjacent_check.clear()
result = 1
min_range += 1
max_range += 1
#print (highest_list, "is the highest", x, "adjacent term list.")
highest_adjacent = ("{:,}".format(highest_adjacent))
return highest_adjacent
print(adjacent(13), "is the greatest product.")
avg_line_length: 58.2 | max_line_length: 1,018 | alphanum_fraction: 0.860356
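adjacent() slides a fixed-width window over the digit string and keeps the largest product seen. The same idea in a compact form, run on the first ten digits of the problem's 1000-digit number:

```python
# Illustrative sketch: greatest product of x adjacent digits, on a short prefix.
from functools import reduce

def greatest_adjacent_product(digits: str, x: int) -> int:
    best = 0
    for i in range(len(digits) - x + 1):
        window = [int(c) for c in digits[i:i + x]]
        best = max(best, reduce(lambda a, b: a * b, window, 1))
    return best

print(greatest_adjacent_product("7316717653", 4))  # 630, from the window 7*6*5*3
```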
hexsha: 1840e87be59bd6818b0b1170d5b7b76a9db29ded | size: 1,824 | ext: py | lang: Python
path: python/pyopenGL/ogl1/ogl_8_multiple_plots.py | repo: SayanGhoshBDA/code-backup | head: 8b6135facc0e598e9686b2e8eb2d69dd68198b80 | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 16 (2018-11-26T08:39:42.000Z .. 2019-05-08T10:09:52.000Z) | max_issues_count: 8 (2020-05-04T06:29:26.000Z .. 2022-02-12T05:33:16.000Z) | max_forks_count: 5 (2020-02-11T16:02:21.000Z .. 2021-02-05T07:48:30.000Z)
# last done till pg. no. 43; do the graph inequalities the next day.
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import sys
#from numpy import *
import numpy as np
import math
def init():
glClearColor(1.0,1.0,1.0,1.0)
gluOrtho2D(-5.0,5.0,-5.0,5.0)
def plotfunc():
glClear(GL_COLOR_BUFFER_BIT)
glColor3f(0.0,0.0,0.0) # color
glPointSize(3.0)
for x in np.arange(-5.0,5.0,0.01):
#y=x*x-2
#y=x**3-3*x-1
#y=x**4-5*x**3+x**2-3*x-1
#y=math.sin(x)
#y=math.sin(3*x)
a=7*x**2
y=2*math.cos(x)*math.sin(x)#+x**3-x**2
k=math.sqrt(5**2-x**2)
# can replace almost any function here!
#y=x**2
b=x**3
#y=x**4+7*x
#glBegin(GL_POINTS)
glBegin(GL_POINTS)
glColor3f(0.9,0.0,0.9) # color
glVertex2f(x,y)
glColor3f(0.3,0.5,0.0) # color
glVertex2f(x+0.5,y+0.5)
glColor3f(0.4,0.4,0.5) # color
glVertex2f(x,a)
glColor3f(0.0,5.0,0.7) # color
glVertex2f(x,b)
glColor3f(0.9,0.5,0.7) # color for circle
glVertex2f(x,k)
glColor3f(0.0,5.0,0.7) # color
glVertex2f(x,-k)
glEnd()
# adding coordinates
glBegin(GL_LINES)
glVertex2f(-5.0,0.0)
glVertex2f(5.0,0.0)
glVertex2f(0.0,5.0)
glVertex2f(0.0,-5.0)
glEnd()
glFlush()
def main():
    glutInit(sys.argv)  # tells Python we are going to be displaying GLUT-style graphics
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)
    glutInitWindowSize(400,400)       # size/position must be set before glutCreateWindow
    glutInitWindowPosition(50,50)     # for them to take effect
    glutCreateWindow("Plot Points")
    glutDisplayFunc(plotfunc)
    init()
    glutMainLoop()
main()
avg_line_length: 26.057143 | max_line_length: 120 | alphanum_fraction: 0.542215
hexsha: 62fc25c9eb93dfcc9505716fcea3569679a26014 | size: 5,230 | ext: py | lang: Python
path: rbac/ledger_sync/inbound/listener.py | repo: akgunkel/sawtooth-next-directory | head: a88833033ab30e9091479a38947f04c5e396ca46 | licenses: ["Apache-2.0"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: 1 (2019-07-08T22:32:43.000Z .. 2019-07-08T22:32:43.000Z) | max_forks_count: null
# Copyright 2019 Contributors to Hyperledger Sawtooth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
""" Sawtooth Inbound Transaction Queue Listener
"""
from rethinkdb import r
from sawtooth_sdk.protobuf import batch_pb2
from rbac.common.logs import get_default_logger
from rbac.common.sawtooth.client_sync import ClientSync
from rbac.common.sawtooth import batcher
from rbac.ledger_sync.database import Database
LOGGER = get_default_logger(__name__)
def process(rec, database):
""" Process inbound queue records
"""
try:
if "batch" not in rec or not rec["batch"]:
database.run_query(
database.get_table("inbound_queue").get(rec["id"]).delete()
)
rec["sync_direction"] = "inbound"
database.run_query(database.get_table("sync_errors").insert(rec))
return
batch = batch_pb2.Batch()
batch.ParseFromString(rec["batch"])
batch_list = batcher.batch_to_list(batch=batch)
status = ClientSync().send_batches_get_status(batch_list=batch_list)
if status[0]["status"] == "COMMITTED":
if "metadata" in rec and rec["metadata"]:
data = {
"address": rec["address"],
"object_type": rec["object_type"],
"object_id": rec["object_id"],
"provider_id": rec["provider_id"],
"created_at": r.now(),
"updated_at": r.now(),
**rec["metadata"],
}
query = (
database.get_table("metadata")
.get(rec["address"])
.replace(
lambda doc: r.branch(
# pylint: disable=singleton-comparison
(doc == None), # noqa
r.expr(data),
doc.merge(
{"metadata": rec["metadata"], "updated_at": r.now()}
),
)
)
)
result = database.run_query(query)
if (not result["inserted"] and not result["replaced"]) or result[
"errors"
] > 0:
LOGGER.warning(
"error updating metadata record:\n%s\n%s", result, query
)
rec["sync_direction"] = "inbound"
database.run_query(database.get_table("changelog").insert(rec))
database.run_query(
database.get_table("inbound_queue").get(rec["id"]).delete()
)
else:
rec["error"] = get_status_error(status)
rec["sync_direction"] = "inbound"
database.run_query(database.get_table("sync_errors").insert(rec))
database.run_query(
database.get_table("inbound_queue").get(rec["id"]).delete()
)
except Exception as err: # pylint: disable=broad-except
LOGGER.exception(
"%s exception processing inbound record:\n%s", type(err).__name__, rec
)
LOGGER.exception(err)
def get_status_error(status):
""" Try to get the error from a transaction status
"""
try:
LOGGER.warning("Error status %s", status)
return status[0]["invalid_transactions"][0]["message"]
except Exception: # pylint: disable=broad-except
return "Unhandled error {}".format(status)
def listener():
""" Listener for Sawtooth State changes
"""
try:
database = Database()
database.connect()
LOGGER.info("Reading queued Sawtooth transactions")
while True:
feed = database.run_query(database.get_table("inbound_queue"))
count = 0
for rec in feed:
process(rec, database)
count = count + 1
if count == 0:
break
LOGGER.info("Processed %s records in the inbound queue", count)
LOGGER.info("Listening for incoming Sawtooth transactions")
feed = database.run_query(database.get_table("inbound_queue").changes())
for rec in feed:
if rec["new_val"] and not rec["old_val"]: # only insertions
process(rec["new_val"], database)
except Exception as err: # pylint: disable=broad-except
LOGGER.exception("Inbound listener %s exception", type(err).__name__)
LOGGER.exception(err)
finally:
try:
database.disconnect()
except UnboundLocalError:
pass
avg_line_length: 38.175182 | max_line_length: 84 | alphanum_fraction: 0.555258
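listener() first drains everything already sitting in inbound_queue and only then subscribes to the change feed, reacting to insertions alone (new_val present, old_val empty). That insertion filter can be seen in isolation on hypothetical change-feed records, without a database:

```python
# Illustrative sketch: keep only insertions from a RethinkDB-style change feed
# (hypothetical records; no database connection involved).
changes = [
    {"old_val": None, "new_val": {"id": 1, "batch": b"..."}},  # insertion -> process
    {"old_val": {"id": 1}, "new_val": {"id": 1, "x": 2}},      # update    -> ignore
    {"old_val": {"id": 2}, "new_val": None},                   # deletion  -> ignore
]

inserted = [c["new_val"] for c in changes if c["new_val"] and not c["old_val"]]
print(inserted)  # [{'id': 1, 'batch': b'...'}]
```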
hexsha: 9ab3b6ab4175317ba8bba745b86688474420a649 | size: 3,900 | ext: py | lang: Python
path: research/nlp/skipgram/src/skipgram.py | repo: leelige/mindspore | head: 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | licenses: ["Apache-2.0"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 77 (2021-10-15T08:32:37.000Z .. 2022-03-30T13:09:11.000Z) | max_issues_count: 3 (2021-10-30T14:44:57.000Z .. 2022-02-14T06:57:57.000Z) | max_forks_count: 24 (2021-10-15T08:32:45.000Z .. 2022-03-24T18:45:20.000Z)
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
skipgram network
"""
import os
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.nn as nn
import mindspore.ops as ops
from mindspore.common.initializer import Uniform
class SkipGram(nn.Cell):
"""Skip gram model of word2vec.
Attributes:
vocab_size: Vocabulary size.
emb_dimension: Embedding dimension.
c_emb: Embedding for center word.
n_emb: Embedding for neighbor word.
"""
def __init__(self, vocab_size, emb_dimension):
"""Initialize model parameters.
Apply for two embedding layers.
Initialize layer weight.
Args:
vocab_size: Vocabulary size.
emb_dimension: Embedding dimension.
Returns:
None
"""
super(SkipGram, self).__init__()
self.vocab_size = vocab_size
self.emb_dimension = emb_dimension
self.c_emb = nn.Embedding(vocab_size, emb_dimension, embedding_table=Uniform(0.5/emb_dimension))
self.n_emb = nn.Embedding(vocab_size, emb_dimension, embedding_table=Uniform(0))
# Operators (stateless)
self.mul = ops.Mul()
self.sum = ops.ReduceSum(keep_dims=False)
self.logsigmoid = nn.LogSigmoid()
self.expand_dims = ops.ExpandDims()
self.squeeze = ops.Squeeze()
self.transpose = ops.Transpose()
self.perm = (0, 2, 1)
self.cast = ops.Cast()
def construct(self, center_word, pos_word, neg_words):
"""Forward network construction.
Args:
center_word: center word ids.
pos_word: positive word ids.
neg_words: negative samples' word ids.
Returns:
loss.
"""
emb_u = self.c_emb(center_word) # (batch_size, emb_dim)
emb_v = self.n_emb(pos_word)
score = self.mul(emb_u, emb_v) # (batch_size, emb_dim)
score = self.sum(score, 1) # (batch_size, )
score = self.logsigmoid(score)
neg_emb_v = self.n_emb(neg_words) # (batch_size, neg_num, emb_dim)
neg_emb_v = self.transpose(neg_emb_v, self.perm) # (batch_size, emb_dim, neg_num)
emb_u2 = self.expand_dims(emb_u, 2) # (batch_size, emb_dim, 1)
neg_score = self.mul(neg_emb_v, emb_u2) # (batch_size, emb_dim, neg_num)
neg_score = self.transpose(neg_score, self.perm) # (batch_size, neg_num, emb_dim)
neg_score = self.sum(neg_score, 2) # (batch_size, neg_num)
neg_score = self.logsigmoid(-1 * neg_score)
neg_score = self.sum(neg_score, 1) # (batch_size, )
loss = self.cast(-(score + neg_score), mstype.float32)
return loss
def save_w2v_emb(self, dir_path, id2word):
"""Save word2vec embeddings to file.
        Args:
            dir_path: directory where 'w2v_emb.npy' will be saved.
            id2word: map wid to word.
Returns:
None.
"""
w2v_emb = dict()
parameters = []
for item in self.c_emb.get_parameters():
parameters.append(item)
emb_mat = parameters[0].asnumpy()
for wid, emb in enumerate(emb_mat):
word = id2word[wid]
w2v_emb[word] = emb
np.save(os.path.join(dir_path, 'w2v_emb.npy'), w2v_emb)
avg_line_length: 33.333333 | max_line_length: 104 | alphanum_fraction: 0.623333
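construct() computes the standard negative-sampling objective: log-sigmoid of the positive dot product plus log-sigmoid of the negated negative dot products, with the sum negated as the loss. The same arithmetic for a single center word in NumPy, with randomly generated stand-in embeddings:

```python
# Illustrative sketch: skip-gram negative-sampling loss for one sample (NumPy only).
import numpy as np

def log_sigmoid(x):
    return -np.logaddexp(0.0, -x)              # numerically stable log(sigmoid(x))

emb_dim, neg_num = 4, 3
rng = np.random.default_rng(0)
u      = rng.normal(size=emb_dim)              # center-word embedding (stand-in values)
v_pos  = rng.normal(size=emb_dim)              # positive context embedding
v_negs = rng.normal(size=(neg_num, emb_dim))   # negative-sample embeddings

pos_term = log_sigmoid(u @ v_pos)
neg_term = log_sigmoid(-(v_negs @ u)).sum()
loss = -(pos_term + neg_term)                  # mirrors -(score + neg_score) above
print(float(loss))
```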
hexsha: 49997d7ba1396e63db8a84c1b6ca053ca4b41b0d | size: 809 | ext: py | lang: Python
path: _collections/articles/obstoanki_setup.py | repo: SubZeroX/SubZeroX.github.io | head: 1df9c43d538af7812e68ac07d7591f258c8c1619 | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: null | max_forks_count: null
import urllib.request
import sys
import subprocess
import os
SCRIPT_URL = "".join(
[
"https://github.com/Pseudonium/Obsidian_to_Anki/releases/latest",
"/download/obsidian_to_anki.py"
]
)
REQUIRE_URL = "".join(
[
"https://github.com/Pseudonium/Obsidian_to_Anki/releases/latest",
"/download/requirements.txt"
]
)
with urllib.request.urlopen(SCRIPT_URL) as script:
with open("obsidian_to_anki.py", "wb") as f:
f.write(script.read())
with urllib.request.urlopen(REQUIRE_URL) as require:
with open("obstoankirequire.txt", "wb") as f:
f.write(require.read())
subprocess.check_call(
[sys.executable, "-m", "pip", "install", "-r", "obstoankirequire.txt"]
)
os.remove("obstoankirequire.txt")
avg_line_length: 26.096774 | max_line_length: 79 | alphanum_fraction: 0.631644
hexsha: 91f26782088d79019bc4f02ecd720aa9fc815596 | size: 2,626 | ext: py | lang: Python
path: Assembler/tests/test_labels_beta.py | repo: Laegluin/mikrorechner | head: 7e5e878072c941e422889465c43dea838b83e5fd | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 1 (2019-01-28T01:53:20.000Z .. 2019-01-28T01:53:20.000Z) | max_issues_count: null | max_forks_count: null
from tests import test
import labels as lb
def test_is_datastring():
test.assertTrue(lb.is_datastring('0xFfA'))
test.assertTrue(lb.is_datastring('-123645'))
test.assertTrue(lb.is_datastring('235'))
test.assertTrue(lb.is_datastring('0b0101001'))
test.assertFalse(lb.is_datastring('0xFfg'))
test.assertFalse(lb.is_datastring('-0xFfA'))
test.assertFalse(lb.is_datastring('-0b1'))
test.assertFalse(lb.is_datastring(''))
def test_necessary_byte_storage():
test.assertEquals(lb.necessary_byte_storage('R4 = R5 + R6'), 4)
test.assertEquals(lb.necessary_byte_storage('\t'), 0)
test.assertEquals(lb.necessary_byte_storage('0xFFFFFF'), 3)
test.assertEquals(lb.necessary_byte_storage('0'), 1)
test.assertEquals(lb.necessary_byte_storage('256'), 2)
test.assertEquals(lb.necessary_byte_storage('255'), 1)
def test_cut_labels():
test.assertEquals(lb.cut_labels(['hallo welt _sdf']), ['hallo welt'])
test.assertEquals(lb.cut_labels(['hallo welt_sdf']), ['hallo welt_sdf'])
test.assertEquals(lb.cut_labels(['hallo_welt _sdf']), ['hallo_welt'])
test.assertEquals(lb.cut_labels(['hallo welt _sdf usw ']), ['hallo welt _sdf usw '])
test.assertEquals(lb.cut_labels(['hallo welt _??']), ['hallo welt _??'])
test.assertEquals(lb.cut_labels(['']), [''])
def test_cut_comments():
test.assertEquals(lb.cut_comments(['hallo welt _sdf #hier erst cutten']), ['hallo welt _sdf '])
test.assertEquals(lb.cut_comments(['hallo welt _sdf#hier erst cutten']), ['hallo welt _sdf'])
test.assertEquals(lb.cut_comments(['hallo welt #hier erst cutten _keine_labels_mitzählen']), ['hallo welt '])
test.assertEquals(lb.cut_comments(['#hallo welt _sdf #hier erst cutten']), [''])
def test_cut_whitespace_lines():
test.assertEquals(lb.cut_whitespace_lines(['']), [])
test.assertEquals(lb.cut_whitespace_lines(['\t', 'a']), ['a'])
test.assertEquals(lb.cut_whitespace_lines(['\n']), [])
test.assertEquals(lb.cut_whitespace_lines(['hallo', '', 'hallo']), ['hallo', 'hallo'])
def test_get_label_values_dictionary():
test.assertEquals(lb.get_label_values_dictionary(['R4 = R5 _label']), {'label': 'R4 = R5 '})
test.assertEquals(lb.get_label_values_dictionary(['R4 = R5 _label #hier nicht']), {})
test.assertEquals(lb.get_label_values_dictionary([' _label']), {'label': ' '})
test.assertEquals(lb.get_label_values_dictionary(['R4 = R5 _?']), {})
def test_all():
test_is_datastring()
test_necessary_byte_storage()
test_cut_labels()
test_cut_comments()
test_get_label_values_dictionary()
test_cut_whitespace_lines()
avg_line_length: 45.275862 | max_line_length: 113 | alphanum_fraction: 0.706778
hexsha: 626e6c09f08958d78e1a96c9b4eba4f98fb25e05 | size: 1,578 | ext: py | lang: Python
path: PYTHON/Regex_and_Parsing/validating_uid.py | repo: byung-u/HackerRank | head: 4c02fefff7002b3af774b99ebf8d40f149f9d163 | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: null | max_forks_count: null
#!/usr/bin/env python3
import re
for _ in range(int(input())):
m = re.match(r'(?!([a-zA-Z0-9]){1,}.*?\1)(?=(.*\d+){3,})(?=(.*[A-Z]+){2,})(?=^[\d\w]{10}$)', input())
if m is None:
print('Invalid')
else:
print('Valid')
# TEST
# m = re.match(r'(?=^[\d\w]{10}$)(?=.*[0-9]{3,})(?=.*[A-Z]{2,})(?=^(?:([\s\w\d])(?!.*\1))*$)', input())
# m = re.match(r'(?=(.*\d+){3,})', n)
# m = re.match(r'(?=(.*\d+){3,})(?=(.*[A-Z]+){2,})(?=^[\d\w]{10}$)', n)
# m = re.match(r'([a-zA-Z0-9]).*?\1', n)
# m = re.match(r'([a-zA-Z0-9]).*?\1+', n)
# m = re.match(r'([a-zA-Z0-9]){1,}.*?\1+', n)
'''
m = re.match(r'(?=(.*\d+){3,})(?=(.*[A-Z]+){2,})(?=^[\d\w]{10}$)', n)
m = re.match(r'(?!([a-zA-Z0-9]){1,}.*?\1)', n)
m = re.match(r'(?!([a-zA-Z0-9]){1,}.*?\1+)', n)
m = re.match(r'(?=(.*\d+){3,})(?=(.*[A-Z]+){2,})(?=^[\d\w]{10}$)(?!([a-zA-Z0-9]){1,}.*?\1+)', n)
m = re.match(r'(?=(.*\d+){3,})(?=(.*[A-Z]+){2,})(?=^[\d\w]{10}$)(?!([a-zA-Z0-9]){1,}.*?\1)', n)
'''
# n = input()
# m = re.match(r'(?=^[\d\w]{10}$)(?=.*[0-9]{3,})(?=.*[A-Z]{2,})(?=^(?:([\s\w\d])(?!.*\1))*$)', n)
# print(m, n)
# m = re.match(r'(?=.*[0-9]){3,}(?=.*[A-Z]){2,}', input())
# m = re.match(r'(?=^[\d\w]{10}$)(?=.*[0-9]{3,})(?=.*[A-Z]{2,})(?=(.)\1)', input())
# m = re.match(r'(?=^[\d\w]{10}$)(?=.*[0-9]{3,})(?=.*[A-Z]{2,})(?:([\s\w\d])(?!.*\1))', input())
# m = re.match(r'(?=^[\d\w]{10}$)(?=.*[0-9]{3,})(?=.*[A-Z]{2,})', n)
# m = re.match(r'(?=^[\d\w]{10}$)(?=.*[0-9]{3,})(?=.*[A-Z]{2,})(?:([\s\w\d])(?!.*\1))', n)
avg_line_length: 41.526316 | max_line_length: 107 | alphanum_fraction: 0.308619
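The single regex packs four rules into lookaheads: no repeated character, at least three digits, at least two uppercase letters, and exactly ten alphanumeric characters. The same intent written as explicit checks, with two hypothetical UIDs:

```python
# Illustrative sketch: the UID rules spelled out as plain checks.
def is_valid_uid(uid: str) -> bool:
    return (
        len(uid) == 10
        and uid.isalnum()                        # alphanumeric characters only
        and len(set(uid)) == len(uid)            # no character may repeat
        and sum(c.isdigit() for c in uid) >= 3   # at least 3 digits
        and sum(c.isupper() for c in uid) >= 2   # at least 2 uppercase letters
    )

print(is_valid_uid("B1CD102354"))  # False: '1' repeats
print(is_valid_uid("B1CDEF2354"))  # True
```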
hexsha: 628dca7eafeadab9b0364001a2b436c3a726d50b | size: 2,997 | ext: py | lang: Python
path: Co-Simulation/Sumo/sumo-1.7.0/tools/build/history.py | repo: uruzahe/carla | head: 940c2ab23cce1eda1ef66de35f66b42d40865fb1 | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 4 (2020-11-13T02:35:56.000Z .. 2021-03-29T20:15:54.000Z) | max_issues_count: 9 (2020-12-09T02:12:39.000Z .. 2021-02-18T00:15:28.000Z) | max_forks_count: 1 (2020-11-20T19:31:26.000Z .. 2020-11-20T19:31:26.000Z)
#!/usr/bin/env python
# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo
# Copyright (C) 2011-2020 German Aerospace Center (DLR) and others.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# https://www.eclipse.org/legal/epl-2.0/
# This Source Code may also be made available under the following Secondary
# Licenses when the conditions for such availability set forth in the Eclipse
# Public License 2.0 are satisfied: GNU General Public License, version 2
# or later which is available at
# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
# @file history.py
# @author Michael Behrisch
# @date 2014-06-21
"""
This script builds all sumo versions in a certain revision range
and tries to eliminate duplicates afterwards.
"""
from __future__ import absolute_import
import subprocess
import optparse
import shutil
import os
import sys
import traceback
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import sumolib # noqa
optParser = optparse.OptionParser()
optParser.add_option("-b", "--begin", default="v1_3_0", help="first revision to build")
optParser.add_option("-e", "--end", default="HEAD", help="last revision to build")
options, args = optParser.parse_args()
LOCK = "../history.lock"
if os.path.exists(LOCK):
sys.exit("History building is still locked!")
open(LOCK, 'w').close()
try:
subprocess.call(["git", "checkout", "-q", "master"])
subprocess.call(["git", "pull"])
commits = {}
for line in subprocess.check_output(["git", "log", "%s..%s" % (options.begin, options.end)]).splitlines():
if line.startswith("commit "):
h = line.split()[1]
commits[h] = sumolib.version.gitDescribe(h)
haveBuild = False
for h, desc in sorted(commits.items(), key=lambda x: x[1]):
if not os.path.exists('../bin%s' % desc):
ret = subprocess.call(["git", "checkout", "-q", h])
if ret != 0:
continue
os.chdir("build/cmake-build")
subprocess.call('make clean; make -j32', shell=True)
os.chdir("../..")
haveBuild = True
shutil.copytree('bin', '../bin%s' % desc,
ignore=shutil.ignore_patterns('Makefile*', '*.bat', '*.jar'))
subprocess.call('strip -R .note.gnu.build-id ../bin%s/*' % desc, shell=True)
subprocess.call("sed -i 's/%s/%s/' ../bin%s/*" % (desc, len(desc) * "0", desc), shell=True)
if haveBuild:
for line in subprocess.check_output('fdupes -1 -q ../binv*', shell=True).splitlines():
dups = line.split()
for d in dups[1:]:
subprocess.call('ln -sf %s %s' % (dups[0], d), shell=True)
subprocess.call(["git", "checkout", "-q", "master"])
except Exception:
traceback.print_exc()
os.remove(LOCK)
avg_line_length: 39.96 | max_line_length: 110 | alphanum_fraction: 0.644311
hexsha: b8242ba9873540a3dbca15f727f6c96c2a8fc842 | size: 467 | ext: py | lang: Python
max_stars:  path: bind/pyevt/pyevt/evt_data.py | repo: harrywong/evt | head: 95985384619e0f5ff4021e8838d421ac4b4b946d | licenses: ["BSD-3-Clause"] | count: 1,411 (2018-04-23T03:57:30.000Z .. 2022-02-13T10:34:22.000Z)
max_issues: path: bind/pyevt/pyevt/evt_data.py | repo: Zhang-Zexi/evt | head: e90fe4dbab4b9512d120c79f33ecc62791e088bd | licenses: ["Apache-2.0"] | count: 27 (2018-06-11T10:34:42.000Z .. 2019-07-27T08:50:02.000Z)
max_forks:  path: bind/pyevt/pyevt/evt_data.py | repo: Zhang-Zexi/evt | head: e90fe4dbab4b9512d120c79f33ecc62791e088bd | licenses: ["Apache-2.0"] | count: 364 (2018-06-09T12:11:53.000Z .. 2020-12-15T03:26:48.000Z)
from io import StringIO
from . import evt_exception, libevt
class EvtData:
def __init__(self, data):
self.data = data
self.evt = libevt.check_lib_init()
def __del__(self):
ret = self.evt.lib.evt_free(self.data)
evt_exception.evt_exception_raiser(ret)
def to_hex_string(self):
hstr = StringIO()
for i in range(self.data.sz):
hstr.write(self.data.buf[i].hex())
return hstr.getvalue()
avg_line_length: 23.35 | max_line_length: 47 | alphanum_fraction: 0.627409
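to_hex_string walks the C buffer byte by byte and appends each byte's hex through a StringIO. For a plain Python bytes object the same result comes from a single bytes.hex() call; a tiny sketch with a hypothetical buffer:

```python
# Illustrative sketch: hex-encoding a buffer, StringIO loop vs. bytes.hex().
from io import StringIO

buf = bytes([0xDE, 0xAD, 0xBE, 0xEF])   # hypothetical buffer contents

hstr = StringIO()
for b in buf:
    hstr.write(bytes([b]).hex())
print(hstr.getvalue())   # deadbeef
print(buf.hex())         # deadbeef, equivalent in one call
```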
hexsha: b257a9e52146db719e997ed8872a96d99f210459 | size: 4,000 | ext: py | lang: Python
path: research/nlp/dscnn/eval.py | repo: leelige/mindspore | head: 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | licenses: ["Apache-2.0"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 77 (2021-10-15T08:32:37.000Z .. 2022-03-30T13:09:11.000Z) | max_issues_count: 3 (2021-10-30T14:44:57.000Z .. 2022-02-14T06:57:57.000Z) | max_forks_count: 24 (2021-10-15T08:32:45.000Z .. 2022-03-24T18:45:20.000Z)
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===========================================================================
"""DSCNN eval."""
import os
import datetime
import glob
import numpy as np
from mindspore import context
from mindspore import Tensor, Model
from mindspore.common import dtype as mstype
from src.log import get_logger
from src.dataset import audio_dataset
from src.ds_cnn import DSCNN
from src.models import load_ckpt
from src.model_utils.config import config
from src.model_utils.moxing_adapter import moxing_wrapper
from src.model_utils.device_adapter import get_device_id
def get_top5_acc(top5_arg, gt_class):
sub_count = 0
for top5, gt in zip(top5_arg, gt_class):
if gt in top5:
sub_count += 1
return sub_count
def val(args, model, test_de):
'''Eval.'''
eval_dataloader = test_de.create_tuple_iterator()
img_tot = 0
top1_correct = 0
top5_correct = 0
for data, gt_classes in eval_dataloader:
output = model.predict(Tensor(data, mstype.float32))
output = output.asnumpy()
top1_output = np.argmax(output, (-1))
top5_output = np.argsort(output)[:, -5:]
gt_classes = gt_classes.asnumpy()
t1_correct = np.equal(top1_output, gt_classes).sum()
top1_correct += t1_correct
top5_correct += get_top5_acc(top5_output, gt_classes)
img_tot += output.shape[0]
results = [[top1_correct], [top5_correct], [img_tot]]
results = np.array(results)
top1_correct = results[0, 0]
top5_correct = results[1, 0]
img_tot = results[2, 0]
acc1 = 100.0 * top1_correct / img_tot
acc5 = 100.0 * top5_correct / img_tot
if acc1 > args.best_acc:
args.best_acc = acc1
args.best_index = args.index
args.logger.info('Eval: top1_cor:{}, top5_cor:{}, tot:{}, acc@1={:.2f}%, acc@5={:.2f}%' \
.format(top1_correct, top5_correct, img_tot, acc1, acc5))
@moxing_wrapper(pre_process=None)
def main():
context.set_context(mode=context.GRAPH_MODE, device_target=config.device_target, device_id=get_device_id())
# Logger
config.outputs_dir = os.path.join(config.log_path, datetime.datetime.now().strftime('%Y-%m-%d_time_%H_%M_%S'))
config.logger = get_logger(config.outputs_dir)
# show args
config.logger.save_args(config)
# find model path
if os.path.isdir(config.model_dir):
models = list(glob.glob(os.path.join(config.model_dir, '*.ckpt')))
print(models)
f = lambda x: -1 * int(os.path.splitext(os.path.split(x)[-1])[0].split('-')[0].split('epoch')[-1])
config.models = sorted(models, key=f)
else:
config.models = [config.model_dir]
config.best_acc = 0
config.index = 0
config.best_index = 0
for model_path in config.models:
test_de = audio_dataset(config.eval_feat_dir, 'testing', config.model_setting_spectrogram_length,
config.model_setting_dct_coefficient_count, config.per_batch_size)
network = DSCNN(config, config.model_size_info)
load_ckpt(network, model_path, False)
network.set_train(False)
model = Model(network)
config.logger.info('load model %s success', model_path)
val(config, model, test_de)
config.index += 1
config.logger.info('Best model:{} acc:{:.2f}%'.format(config.models[config.best_index], config.best_acc))
if __name__ == "__main__":
main()
avg_line_length: 35.714286 | max_line_length: 114 | alphanum_fraction: 0.6725
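val() accumulates top-1 and top-5 hits across batches: argmax for top-1 and the five largest logits (via argsort) for top-5. The same bookkeeping on one hypothetical batch of logits, in NumPy alone:

```python
# Illustrative sketch: top-1 / top-5 accuracy for one batch of logits (NumPy only).
import numpy as np

logits = np.array([[0.1, 0.5, 0.2, 0.9, 0.3, 0.4, 0.0, 0.6],   # hypothetical outputs
                   [0.7, 0.1, 0.8, 0.2, 0.3, 0.9, 0.4, 0.5]])
labels = np.array([3, 0])

top1 = np.argmax(logits, axis=-1)             # [3, 5]
top5 = np.argsort(logits, axis=-1)[:, -5:]    # five highest-scoring classes per row
top1_correct = int(np.equal(top1, labels).sum())
top5_correct = sum(int(label in row) for row, label in zip(top5, labels))
print(top1_correct / len(labels), top5_correct / len(labels))  # 0.5 1.0
```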
hexsha: a23603b7b843cb11a99579993e200710f7f70f18 | size: 9,071 | ext: py | lang: Python
path: test/test_npu/test_network_ops/test_bitwise_xor.py | repo: Ascend/pytorch | head: 39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc | licenses: ["BSD-3-Clause"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 1 (2021-12-02T03:07:35.000Z .. 2021-12-02T03:07:35.000Z) | max_issues_count: 1 (2021-11-12T07:23:03.000Z .. 2021-11-12T08:28:13.000Z) | max_forks_count: null
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import sys
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
import random
class TestBitwiseXor(TestCase):
def generate_data(self, min, max, shape_x, shape_y, dtype):
input1 = np.random.randint(min, max, shape_x).astype(dtype)
input2 = np.random.randint(min, max, shape_y).astype(dtype)
        # can't convert np.uint16 to a PyTorch tensor, so convert np.uint16 to np.int32 first
if input1.dtype == np.uint16:
input1 = input1.astype(np.int32)
input2 = input2.astype(np.int32)
# modify from numpy.ndarray to torch.tensor
npu_input1 = torch.from_numpy(input1)
npu_input2 = torch.from_numpy(input2)
return npu_input1, npu_input2
def cpu_op_exec(self, input1, input2):
output = torch.bitwise_xor(input1, input2)
if output.dtype not in [torch.int32, torch.bool]:
output = output.to(torch.int32)
output = output.numpy()
return output
def npu_op_exec(self, input1, input2):
input1 = input1.to("npu")
input2 = input2.to("npu")
output = torch.bitwise_xor(input1, input2)
output = output.to("cpu")
if output.dtype not in [torch.int32, torch.bool]:
output = output.to(torch.int32)
output = output.numpy()
return output
def cpu_op_exec_scalar(self, input1, scalar):
output = torch.bitwise_xor(input1, scalar)
if output.dtype not in [torch.int32, torch.bool]:
output = output.to(torch.int32)
output = output.numpy()
return output
def npu_op_exec_scalar(self, input1, input2):
input1 = input1.to("npu")
output = torch.bitwise_xor(input1, input2)
output = output.to("cpu")
if output.dtype not in [torch.int32, torch.bool]:
output = output.to(torch.int32)
output = output.numpy()
return output
def npu_op_exec_scalar_out(self, input1, scalar, output):
input1 = input1.to("npu")
output = output.to("npu")
output = torch.bitwise_xor(input1, scalar, out = output)
output = output.to("cpu")
if output.dtype not in [torch.int32, torch.bool]:
output = output.to(torch.int32)
output = output.numpy()
return output
def npu_op_exec_out(self, input1, input2, input3):
input1 = input1.to("npu")
input2 = input2.to("npu")
output = input3.to("npu")
torch.bitwise_xor(input1, input2, out=output)
output = output.to("cpu")
if output.dtype not in [torch.int32, torch.bool]:
output = output.to(torch.int32)
output = output.numpy()
return output
def bitwise_xor_tensor_out_result(self, shape_format):
for item in shape_format:
cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)
cpu_input2, npu_input2 = create_common_tensor(item[0], 0, 100)
cpu_input3, npu_input3 = create_common_tensor(item[1], 0, 100)
cpu_output_out = self.cpu_op_exec(cpu_input1, cpu_input2)
npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3)
cpu_output_out = cpu_output_out.astype(npu_output_out.dtype)
self.assertRtolEqual(cpu_output_out, npu_output_out)
def test_bitwise_xor_tensor_out(self, device):
shape_format = [
[[np.int16, 0, [128, 3, 224, 224]], [np.int16, 0, [3, 3, 3]]],
[[np.int16, 0, [128, 116, 14, 14]], [np.int16, 0, [128, 116, 14, 14]]],
[[np.int32, 0, [256, 128, 7, 7]], [np.int32, 0, [128, 256, 3, 3]]],
[[np.int32, 0, [2, 3, 3, 3]], [np.int32, 0, [3, 1, 3]]],
[[np.int32, 0, [128, 232, 7, 7]], [np.int32, 0, [128, 232, 7, 7]]],
]
self.bitwise_xor_tensor_out_result(shape_format)
def bitwise_xor_scalar_out_result(self, shape_format):
for item in shape_format:
cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)
cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100)
scalar = np.random.randint(1, 5)
cpu_output_out = self.cpu_op_exec_scalar(cpu_input1, scalar)
npu_output_out = self.npu_op_exec_scalar_out(npu_input1, scalar, npu_input2)
cpu_output_out = cpu_output_out.astype(npu_output_out.dtype)
self.assertRtolEqual(cpu_output_out, npu_output_out)
def test_bitwise_xor_scalar_out(self, device):
shape_format = [
[[np.int16, 0, [16, 3, 1111, 1212]], [np.int16, 0, [3, 3, 3]]],
[[np.int16, 0, [128, 116, 14, 14]], [np.int16, 0, [128, 116, 14, 14]]],
[[np.int32, 0, [1313, 3, 3, 3]], [np.int32, 0, [3, 1, 3]]],
[[np.int32, 0, [128, 232, 7, 7]], [np.int32, 0, [128, 232, 7, 7]]],
]
self.bitwise_xor_scalar_out_result(shape_format)
def test_bitwise_xor_int16_3d(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (3, 3, 3), (3, 3, 3), np.int16)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
cpu_output = cpu_output.astype(np.float32)
npu_output = npu_output.astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
def test_bitwise_xor_int16_1_1(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (3, 3, 3), (1, 1), np.int16)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
cpu_output = cpu_output.astype(np.float32)
npu_output = npu_output.astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
def test_bitwise_xor_int16_1(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (3, 3, 3), 1, np.int16)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
cpu_output = cpu_output.astype(np.float32)
npu_output = npu_output.astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
def test_bitwise_xor_int16(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (3, 3, 3), (), np.int16)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
cpu_output = cpu_output.astype(np.float32)
npu_output = npu_output.astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
def test_bitwise_xor_int32(self, device):
npu_input1, npu_input2 = self.generate_data(0, 2, (1, 3), (1, 3), np.int32)
cpu_output = self.cpu_op_exec(npu_input1, True)
npu_output = self.npu_op_exec_scalar(npu_input1, True)
cpu_output = cpu_output.astype(np.float32)
npu_output = npu_output.astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
def test_bitwise_xor_bool(self, device):
npu_input1, npu_input2 = self.generate_data(0, 2, (1, 3), (1, 3), np.bool)
cpu_output = self.cpu_op_exec(npu_input1, True)
npu_output = self.npu_op_exec_scalar(npu_input1, True)
cpu_output = cpu_output.astype(np.float32)
npu_output = npu_output.astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
def test_bitwise_xor_uint16(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (3, 3, 3), (3, 3, 3), np.uint16)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
cpu_output = cpu_output.astype(np.float32)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
npu_output = npu_output.astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
def test_bitwise_xor_mix_dtype(self, device):
npu_input1, npu_input3 = self.generate_data(0, 100, (3, 3, 3), (), np.uint16)
npu_input2, npu_input4 = self.generate_data(0, 100, (3, 3, 3), (), np.int32)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
instantiate_device_type_tests(TestBitwiseXor, globals(), except_for='cpu')
if __name__ == "__main__":
run_tests()
avg_line_length: 44.684729 | max_line_length: 92 | alphanum_fraction: 0.653732
hexsha: ac3300de7f3aefafe8411e87cbf809699c355a6f | size: 258 | ext: py | lang: Python
path: marsyas-vamp/marsyas/scripts/Python/batchPeakClustering.py | repo: jaouahbi/VampPlugins | head: 27c2248d1c717417fe4d448cdfb4cb882a8a336a | licenses: ["Apache-2.0"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: null | max_forks_count: null
import os
from glob import glob
beginCommand = "./peakClustering "
endCommand = " -a -s -p 2 -c 3 -o ~/output -N music ";
for name in glob("../../../Database/*V.wav"):
    command = beginCommand + name + endCommand
    print(command)
    os.system(command)
avg_line_length: 25.8 | max_line_length: 56 | alphanum_fraction: 0.643411
hexsha: ce009968e8640e6d1c77e4d902dc078249782d25 | size: 2,330 | ext: py | lang: Python
path: src/onegov/form/extensions.py | repo: politbuero-kampagnen/onegov-cloud | head: 20148bf321b71f617b64376fe7249b2b9b9c4aa9 | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: null | max_forks_count: null
form_extensions = {}
class FormExtension(object):
""" Enables the extension of form definitions/submissions.
When either of those models create a form class they will take the
'extensions' key in the meta dictionary to extend those formcode
based forms.
This allows for specialised behaviour of formcode forms with the drawback
that those definitions/submissions are more tightly bound to the code. That
is to say code in module A could not use submissions defined by module B
unless module B is also present in the path.
To create and register a form extension subclass as follows::
class MyExtension(FormExtension, name='my-extension'):
def create(self):
return self.form_class
Note that you *should not* change the form_class provided to you. Instead
you should subclass it. If you need to change the form class, you need
to clone it::
class MyExtension(FormExtension, name='my-extension'):
def create(self):
return self.form_class.clone()
class MyExtension(FormExtension, name='my-extension'):
def create(self):
class ExtendedForm(self.form_class):
pass
return ExtendedForm
Also, names must be unique and can only be registered once.
"""
def __init__(self, form_class):
self.form_class = form_class
def __init_subclass__(cls, name, **kwargs):
super().__init_subclass__(**kwargs)
assert name not in form_extensions, (
f"A form extension named {name} already exists"
)
form_extensions[name] = cls
def create(self):
raise NotImplementedError
class Extendable(object):
""" Models extending their form classes use this mixin to create the
extended forms. It also serves as a marker to possibly keep track of all
classes that use extended forms.
"""
def extend_form_class(self, form_class, extensions):
if not extensions:
return form_class
for extension in extensions:
if extension not in form_extensions:
raise KeyError(f"Unknown form extension: {extension}")
form_class = form_extensions[extension](form_class).create()
return form_class
avg_line_length: 31.066667 | max_line_length: 79 | alphanum_fraction: 0.663519
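Extensions register themselves via __init_subclass__ under a unique name, and Extendable.extend_form_class chains the registered create() calls over a form class. A minimal sketch of those mechanics using the classes above and a hypothetical stand-in form class (the real WTForms machinery is omitted):

```python
# Illustrative sketch: registering and applying a FormExtension (stand-in form class).
class DummyForm:                     # hypothetical placeholder for a formcode form
    pass

class HoneypotExtension(FormExtension, name='honeypot'):   # registered at class creation
    def create(self):
        class ExtendedForm(self.form_class):               # subclass, don't mutate
            honeypot_enabled = True
        return ExtendedForm

form_cls = Extendable().extend_form_class(DummyForm, extensions=['honeypot'])
print(issubclass(form_cls, DummyForm), form_cls.honeypot_enabled)  # True True
```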
hexsha: ce3502f5079fa6852e13265b391e01e6ac109b62 | size: 967 | ext: py | lang: Python
path: rivercam.py | repo: OrrinEdenfield/RiverCam | head: 207f8c623bbcb9dc0cdbbefe91e1fd33bdb0b84e | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: null | max_forks_count: null
#!/usr/bin/python
import os
import datetime
from picamera import PiCamera
from time import sleep
from azure.storage.blob import BlobClient
# Path to temporary local image file
localpic = '/home/pi/rivercam/image.jpg'
# Take photo
camera = PiCamera()
sleep(5)
camera.capture(localpic)
# Create the variable to use for the filename
dt = str(datetime.datetime.now())
newdt = dt.replace(":", "-")
newdt = newdt.replace(" ", "-")
newdt = newdt.replace(".", "-")
newdt = newdt[0:16]
newname = newdt+'.jpg'
# Upload to local IoT Edge Blob Service
blob = BlobClient.from_connection_string(conn_str="DefaultEndpointsProtocol=http;BlobEndpoint=http://192.168.0.201:11002/azurepistorage;AccountName=azurepistorage;AccountKey=[LOCAL-IOT-EDGE-BLOB-KEY]", container_name="pisynccontainer", blob_name=newname)
with open(localpic, "rb") as data:
blob.upload_blob(data)
# Delete the local file now that it's been uploaded
os.remove(localpic)
avg_line_length: 30.21875 | max_line_length: 255 | alphanum_fraction: 0.730093
hexsha: 02007348627f4ef13c1bf7f02eefb74e199a2762 | size: 7,788 | ext: py | lang: Python
path: python/oneflow/test/graph/test_graph_lr_scheduler.py | repo: wangyuyue/oneflow | head: 0a71c22fe8355392acc8dc0e301589faee4c4832 | licenses: ["Apache-2.0"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: null | max_issues_count: null | max_forks_count: null
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import math
import unittest
import os
import numpy as np
import oneflow as flow
import oneflow.unittest
from oneflow.nn.parameter import Parameter
def _test_linear_graph_train_with_lr_sch(
test_case, iter_num, device, get_opt_and_lr_sch
):
def train_with_module(iter_num=3):
linear = flow.nn.Linear(3, 8)
linear = linear.to(device)
flow.nn.init.constant_(linear.weight, -0.68758)
flow.nn.init.constant_(linear.bias, 0.23)
opt, lr_sch = get_opt_and_lr_sch(linear.parameters())
x = flow.Tensor(
[
[-0.94630778, -0.83378579, -0.87060891],
[2.0289922, -0.28708987, -2.18369248],
[0.35217619, -0.67095644, -1.58943879],
[0.08086036, -1.81075924, 1.20752494],
[0.8901075, -0.49976737, -1.07153746],
[-0.44872912, -1.07275683, 0.06256855],
[-0.22556897, 0.74798368, 0.90416439],
[0.48339456, -2.32742195, -0.59321527],
],
device=device,
requires_grad=False,
)
def one_iter():
of_out = linear(x)
of_out = of_out.sum()
of_out.backward()
opt.step()
if lr_sch is not None:
lr_sch.step()
opt.zero_grad()
return of_out.numpy(), linear.weight.numpy()
check_list = []
for i in range(iter_num):
check_list.append(one_iter())
return check_list
def train_with_graph(iter_num=3):
linear = flow.nn.Linear(3, 8)
linear = linear.to(device)
flow.nn.init.constant_(linear.weight, -0.68758)
flow.nn.init.constant_(linear.bias, 0.23)
opt, lr_sch = get_opt_and_lr_sch(linear.parameters())
x = flow.Tensor(
[
[-0.94630778, -0.83378579, -0.87060891],
[2.0289922, -0.28708987, -2.18369248],
[0.35217619, -0.67095644, -1.58943879],
[0.08086036, -1.81075924, 1.20752494],
[0.8901075, -0.49976737, -1.07153746],
[-0.44872912, -1.07275683, 0.06256855],
[-0.22556897, 0.74798368, 0.90416439],
[0.48339456, -2.32742195, -0.59321527],
],
device=device,
requires_grad=False,
)
class LinearTrainGraph(flow.nn.Graph):
def __init__(self):
super().__init__()
self.linear = linear
if lr_sch is None:
self.add_optimizer(opt)
else:
self.add_optimizer(opt, lr_sch=lr_sch)
def build(self, x):
out = self.linear(x)
out = out.sum()
out.backward()
return out
linear_t_g = LinearTrainGraph()
def one_iter():
of_graph_out = linear_t_g(x)
return of_graph_out.numpy(), linear_t_g.linear.weight.origin.numpy()
check_list = []
for i in range(iter_num):
check_list.append(one_iter())
return check_list
module_check_list = train_with_module(iter_num)
graph_check_list = train_with_graph(iter_num)
for i in range(iter_num):
# check equal on loss
test_case.assertTrue(
np.allclose(
module_check_list[i][0],
graph_check_list[i][0],
rtol=0.00001,
atol=0.00001,
)
)
# check equal on weight
test_case.assertTrue(
np.allclose(
module_check_list[i][1],
graph_check_list[i][1],
rtol=0.00001,
atol=0.00001,
)
)
def _sgd_cosine_fn(parameters):
of_sgd = flow.optim.SGD(parameters, lr=0.001)
alpha = 0.5
steps = 10
cosine_annealing_lr = flow.optim.lr_scheduler.CosineAnnealingLR(
of_sgd, steps=steps, alpha=alpha
)
return of_sgd, cosine_annealing_lr
def _sgd_cosine_constant_fn(parameters):
of_sgd = flow.optim.SGD(parameters, lr=0.001)
alpha = 0.5
steps = 10
cosine_annealing_lr = flow.optim.lr_scheduler.CosineAnnealingLR(
of_sgd, steps=steps, alpha=alpha
)
constant_warmup_cosine_lr = flow.optim.lr_scheduler.WarmUpLR(
cosine_annealing_lr, warmup_factor=0.5, warmup_iters=5, warmup_method="constant"
)
return of_sgd, constant_warmup_cosine_lr
def _sgd_constant_fn(parameters):
of_sgd = flow.optim.SGD(parameters, lr=0.001)
alpha = 0.5
steps = 10
constant_warmup_lr = flow.optim.lr_scheduler.WarmUpLR(
of_sgd, warmup_factor=0.5, warmup_iters=5, warmup_method="constant"
)
return of_sgd, constant_warmup_lr
def _sgd_cosine_linear_fn(parameters):
of_sgd = flow.optim.SGD(parameters, lr=0.001)
alpha = 0.5
steps = 10
cosine_annealing_lr = flow.optim.lr_scheduler.CosineAnnealingLR(
of_sgd, steps=steps, alpha=alpha
)
linear_warmup_cosine_lr = flow.optim.lr_scheduler.WarmUpLR(
cosine_annealing_lr, warmup_factor=0.5, warmup_iters=5, warmup_method="linear"
)
return of_sgd, linear_warmup_cosine_lr
def _sgd_linear_fn(parameters):
of_sgd = flow.optim.SGD(parameters, lr=0.001)
alpha = 0.5
steps = 10
linear_warmup_lr = flow.optim.lr_scheduler.WarmUpLR(
of_sgd, warmup_factor=0.5, warmup_iters=5, warmup_method="linear"
)
return of_sgd, linear_warmup_lr
@unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
@flow.unittest.skip_unless_1n1d()
class TestLinearGraphTrainWithCosineLrScheduler(flow.unittest.TestCase):
def test_graph_cosine(test_case):
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cuda"), _sgd_cosine_fn
)
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cpu"), _sgd_cosine_fn
)
def test_graph_cosine_constant(test_case):
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cuda"), _sgd_cosine_constant_fn
)
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cpu"), _sgd_cosine_constant_fn
)
def test_graph_constant(test_case):
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cuda"), _sgd_constant_fn
)
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cpu"), _sgd_constant_fn
)
def test_graph_cosine_linear(test_case):
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cuda"), _sgd_cosine_linear_fn
)
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cpu"), _sgd_cosine_linear_fn
)
def test_graph_linear(test_case):
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cuda"), _sgd_linear_fn
)
_test_linear_graph_train_with_lr_sch(
test_case, 21, flow.device("cpu"), _sgd_linear_fn
)
if __name__ == "__main__":
unittest.main()
| 32.049383 | 88 | 0.616846 |
cec82a491e48e098ad32e60bf03ec9fd31eb84bc
| 696 |
py
|
Python
|
py/jpy/src/test/python/jpy_obj_test.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 55 |
2021-05-11T16:01:59.000Z
|
2022-03-30T14:30:33.000Z
|
py/jpy/src/test/python/jpy_obj_test.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 943 |
2021-05-10T14:00:02.000Z
|
2022-03-31T21:28:15.000Z
|
py/jpy/src/test/python/jpy_obj_test.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 29 |
2021-05-10T11:33:16.000Z
|
2022-03-30T21:01:54.000Z
|
import unittest
import jpyutil
jpyutil.init_jvm(jvm_maxmem='32M', jvm_classpath=['target/test-classes'])
import jpy
class TestJavaArrays(unittest.TestCase):
def setUp(self):
self.Fixture = jpy.get_type('org.jpy.fixtures.ConstructionTestFixture')
self.assertIsNotNone(self.Fixture)
def test_large_obj_by_constructor_alloc(self):
# 100 * 1MB
for _ in range(100):
fixture = self.Fixture(1000000) # 1MB
def test_large_obj_by_static_alloc(self):
# 100 * 1MB
for _ in range(100):
fixture = self.Fixture.viaStatic(1000000) # 1MB
if __name__ == '__main__':
print('\nRunning ' + __file__)
unittest.main()
avg_line_length: 24.857143 | max_line_length: 79 | alphanum_fraction: 0.668103
hexsha: 65146ef8dc35caec09ada8e0674533c36180a7c2 | size: 4,469 | ext: py | lang: Python
path: Packs/Dig/Scripts/Dig/Dig.py | repo: diCagri/content | head: c532c50b213e6dddb8ae6a378d6d09198e08fc9f | licenses: ["MIT"] (identical across the max_stars / max_issues / max_forks views)
max_stars_count: 799 (2016-08-02T06:43:14.000Z .. 2022-03-31T11:10:11.000Z) | max_issues_count: 9,317 (2016-08-07T19:00:51.000Z .. 2022-03-31T21:56:04.000Z) | max_forks_count: 1,297 (2016-08-04T13:59:00.000Z .. 2022-03-31T23:43:06.000Z)
import re
import subprocess
import traceback
from typing import Any, Dict
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
''' STANDALONE FUNCTION '''
# Run Dig command on the server and get A record for the specified host
def dig_result(server: str, name: str):
try:
if server:
server = f"@{server}"
dig_output = subprocess.check_output(
['dig', server, name, '+short', '+identify'], stderr=subprocess.STDOUT, universal_newlines=True
)
if not dig_output:
raise ValueError("Couldn't find A record for:\n" + name)
resolved_addresses, dns_server = regex_result(dig_output, reverse_lookup=False)
return {"name": name, "resolvedaddresses": resolved_addresses, "nameserver": dns_server}
else:
dig_output = subprocess.check_output(
['dig', name, '+short', '+identify'], stderr=subprocess.STDOUT, universal_newlines=True
)
if not dig_output:
raise ValueError("Couldn't find A record for:\n" + name)
resolved_addresses, dns_server = regex_result(dig_output, reverse_lookup=False)
return {"name": name, "resolvedaddresses": resolved_addresses, "nameserver": dns_server}
except subprocess.CalledProcessError as e:
return_error(e.output)
# Run Dig command on the server and get PTR record for the specified IP
def reverse_dig_result(server: str, name: str):
try:
if server:
server = f"@{server}"
dig_output = subprocess.check_output(
['dig', server, '+answer', '-x', name, '+short', '+identify'], stderr=subprocess.STDOUT, universal_newlines=True
)
if not dig_output:
raise ValueError("Couldn't find PTR record for:\n" + name)
resolved_addresses, dns_server = regex_result(dig_output, reverse_lookup=True)
return {"name": name, "resolveddomain": resolved_addresses, "nameserver": dns_server}
else:
dig_output = subprocess.check_output(
['dig', '+answer', '-x', name, '+short', '+identify'], stderr=subprocess.STDOUT, universal_newlines=True
)
if not dig_output:
raise ValueError("Couldn't find PTR record for:\n" + name)
resolved_addresses, dns_server = regex_result(dig_output, reverse_lookup=True)
return {"name": name, "resolveddomain": resolved_addresses, "nameserver": dns_server}
except subprocess.CalledProcessError as e:
return_error(e.output)
def regex_result(dig_output: str, reverse_lookup: bool):
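    # With `+short +identify`, dig typically prints one answer per line followed by the
    # responding nameserver, so the findall calls below yield alternating
    # (result, nameserver) matches: even indices are results and index 1 is the server
    # (for reverse lookups the first match is the PTR domain instead).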
    # regex fragment that matches a number between 0 and 255
num_0_255 = r'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])'
try:
if not reverse_lookup:
regex_results_ip = re.findall(rf'\b(?:{num_0_255}(?:\[\.\]|\.)){{3}}{num_0_255}\b', dig_output)
if not regex_results_ip:
raise ValueError("Couldn't find results:\n")
resolved_addresses = regex_results_ip[::2]
dns_server = regex_results_ip[1]
else:
regex_results_domain = re.findall(
rf'\b^[\S]+|(?:{num_0_255}(?:\[\.\]|\.)){{3}}{num_0_255}\b', dig_output)
if not regex_results_domain:
raise ValueError("Couldn't find results:\n")
resolved_addresses = regex_results_domain[0]
dns_server = regex_results_domain[1]
except Exception as e:
return_error(str(e))
return resolved_addresses, dns_server
''' COMMAND FUNCTION '''
def dig_command(args: Dict[str, Any]) -> CommandResults:
server = args.get('server', None)
name = args.get('name', None)
reverse_lookup = argToBoolean(args.get("reverseLookup"))
if reverse_lookup:
result = reverse_dig_result(server, name)
else:
result = dig_result(server, name)
return CommandResults(
outputs_prefix='digresults',
outputs=result,
ignore_auto_extract=True
)
''' MAIN FUNCTION '''
def main():
try:
return_results(dig_command(demisto.args()))
except Exception as ex:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute Dig. Error: {str(ex)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| 33.103704 | 128 | 0.620944 |
6525e20fd56f57771ac5fa7fed4aacac1e11f0bb
| 11,933 |
py
|
Python
|
train.py
|
quanghona/SOLO_tf2
|
4aab0fc9115d210f08e694ec59b5f093ade8ce91
|
[
"MIT"
] | 8 |
2021-03-07T10:25:21.000Z
|
2022-02-20T23:57:24.000Z
|
train.py
|
quanghona/SOLO_tf2
|
4aab0fc9115d210f08e694ec59b5f093ade8ce91
|
[
"MIT"
] | null | null | null |
train.py
|
quanghona/SOLO_tf2
|
4aab0fc9115d210f08e694ec59b5f093ade8ce91
|
[
"MIT"
] | null | null | null |
from model.model import SOLO
from train.loss import SOLOLoss
from data.tfrecord_decode import Parser
from config import *
import argparse
from datetime import datetime
import time
import os
import tensorflow as tf
from tensorflow.keras.utils import Progbar
tf.config.run_functions_eagerly(False)  # set to True to run tf.functions eagerly when debugging
@tf.function
def train_step(model, loss_fn, optimizer, image, cat_true, mask_true, cat_metric, mask_metric):
with tf.GradientTape() as tape:
cat_pred, mask_pred = model(image, training=True)
total_loss, l_cate, l_mask = loss_fn((cat_true, mask_true), (cat_pred, mask_pred))
gradients = tape.gradient(total_loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
cat_metric.update_state(cat_true, cat_pred)
mask_metric.update_state(mask_true, mask_pred)
return total_loss, l_cate, l_mask
@tf.function
def test_step(model, loss_fn, image, cat_true, mask_true, cat_metric, mask_metric):
    cat_pred, mask_pred = model(image, training=False)
    total_loss, l_cate, l_mask = loss_fn((cat_true, mask_true), (cat_pred, mask_pred))
cat_metric.update_state(cat_true, cat_pred)
mask_metric.update_state(mask_true, mask_pred)
return total_loss, l_cate, l_mask
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='SOLO network training script')
parser.add_argument("--dataset_train", type=str,
help="path to training dataset tfrecord BASE path")
parser.add_argument("--dataset_val", type=str,
help="path to validation dataset tfrecord BASE path")
args = parser.parse_args()
print("Training SOLO network")
display_config("train")
# Load model
model = SOLO(**MODEL_HYPERPARAMETERS)
# add weight decay
for layer in model.layers:
if isinstance(layer, tf.keras.layers.Conv2D) or isinstance(layer, tf.keras.layers.Dense):
layer.add_loss(lambda: tf.keras.regularizers.l2(TRAINING_PARAMETERS['weight_decay'])(layer.kernel))
if hasattr(layer, 'bias_regularizer') and layer.use_bias:
layer.add_loss(lambda: tf.keras.regularizers.l2(TRAINING_PARAMETERS['weight_decay'])(layer.bias))
# Training scheme
lr_schedule = tf.keras.optimizers.schedules.PiecewiseConstantDecay(boundaries=tf.math.multiply(TRAINING_PARAMETERS['epochs'], TRAINING_PARAMETERS['steps_per_epoch']),
values=tf.constant(TRAINING_PARAMETERS['learning_rates']))
optimizer = tf.keras.optimizers.SGD(learning_rate=lr_schedule, momentum=TRAINING_PARAMETERS['momentum'])
loss_fn = SOLOLoss()
# Load data
train_parser = Parser(MODEL_HYPERPARAMETERS['input_size'],
MODEL_HYPERPARAMETERS['grid_sizes'][0],
MODEL_HYPERPARAMETERS['num_class'],
mode='train')
val_parser = Parser(MODEL_HYPERPARAMETERS['input_size'],
MODEL_HYPERPARAMETERS['grid_sizes'][0],
MODEL_HYPERPARAMETERS['num_class'],
mode='val')
train_dataset = train_parser.build_dataset(args.dataset_train,
batch_size=TRAINING_PARAMETERS['batch_size'],
num_epoch=TRAINING_PARAMETERS['num_epoch'])
val_dataset = val_parser.build_dataset(args.dataset_val)
"""Training using built-in method
tb_callback = tf.keras.callbacks.TensorBoard(log_dir=os.path.join('logs', model.model_name), update_freq='batch')
ckpt_callback = tf.keras.callbacks.ModelCheckpoint(filepath=os.path.join('weights', model.model_name, 'weight_' + model.model_name + '.h5'),
save_best_only=True,
save_weights_only=True)
model.compile(optimizer=optimizer,
loss=[loss_fn.get_category_loss(), loss_fn.get_mask_loss()],
loss_weights=loss_fn.weights,
metrics=[tf.keras.metrics.CategoricalAccuracy(),
tf.keras.metrics.MeanIoU(num_classes=MODEL_HYPERPARAMETERS['num_class'])])
model.fit(x=train_dataset,
batch_size=TRAINING_PARAMETERS['batch_size'],
epochs=TRAINING_PARAMETERS['num_epoch'],
shuffle=True,
steps_per_epoch=TRAINING_PARAMETERS['steps_per_epoch'],
validation_data=val_dataset,
validation_batch_size=TRAINING_PARAMETERS['batch_size'],
verbose=1,
callbacks=[tb_callback, ckpt_callback])
"""
# Training using low-level API
# Load/create Checkpoint
ckpt = tf.train.Checkpoint(step=tf.Variable(-1, trainable=False, dtype=tf.int64),
optimizer=optimizer,
model=model,
metric=tf.Variable(1000, trainable=False, dtype=tf.float32))
manager = tf.train.CheckpointManager(ckpt, os.path.join('checkpoints', model.model_name), max_to_keep=5)
ckpt.restore(manager.latest_checkpoint)
if manager.latest_checkpoint:
print("Restored from {}".format(manager.latest_checkpoint))
else:
print("Initializing from scratch.")
# Define Losses
train_loss = tf.keras.metrics.Mean(name='train_loss', dtype=tf.float32)
train_cat_loss = tf.keras.metrics.Mean(name='train_cat_loss', dtype=tf.float32)
train_mask_loss = tf.keras.metrics.Mean(name='train_mask_loss', dtype=tf.float32)
val_loss = tf.keras.metrics.Mean(name='val_loss', dtype=tf.float32)
val_cat_loss = tf.keras.metrics.Mean(name='val_cat_loss', dtype=tf.float32)
val_mask_loss = tf.keras.metrics.Mean(name='val_mask_loss', dtype=tf.float32)
# Define metrics
train_acc = tf.keras.metrics.CategoricalAccuracy(name='train_acc', dtype=tf.float32)
train_meaniou = tf.keras.metrics.MeanIoU(num_classes=2, name='train_meaniou', dtype=tf.float32)
val_acc = tf.keras.metrics.CategoricalAccuracy(name='val_acc', dtype=tf.float32)
val_meaniou = tf.keras.metrics.MeanIoU(num_classes=2, name='val_meaniou', dtype=tf.float32)
# Create logger
log_dir = os.path.join('logs', model.model_name, datetime.now().strftime("%Y%m%d%H%M%S"))
summary_writer = tf.summary.create_file_writer(log_dir)
step = ckpt.step.numpy()
val_metric = ckpt.metric.numpy()
total_val_sample = 5000
progbar = None
start_time = time.perf_counter()
# Start training
for image, cat_true, mask_true in train_dataset:
ckpt.step.assign_add(1)
step += 1
# On epoch start
epoch_step = (step % TRAINING_PARAMETERS['steps_per_epoch']) + 1
if epoch_step == 1:
print("Epoch {}/{}".format((step // TRAINING_PARAMETERS['steps_per_epoch']) + 1, TRAINING_PARAMETERS['num_epoch']))
progbar = Progbar(TRAINING_PARAMETERS['steps_per_epoch'], interval=1, stateful_metrics=['train_acc', 'train_meaniou'])
        # argument order must match the train_step signature: (model, loss_fn, optimizer, ...)
        total_loss, l_cate, l_mask = train_step(model, loss_fn, optimizer, image,
                                                cat_true, mask_true, train_acc, train_meaniou)
values = [('train_loss', total_loss),
('train_cat_loss', l_cate),
('train_mask_loss', l_mask),
('train_acc', train_acc.result()),
('train_meaniou', train_meaniou.result())]
progbar.update(epoch_step, values)
train_loss.update_state(total_loss)
train_cat_loss.update_state(l_cate)
train_mask_loss.update_state(l_mask)
with summary_writer.as_default():
tf.summary.scalar('train loss', train_loss.result(), step=step)
tf.summary.scalar('train category loss', train_cat_loss.result(), step=step)
tf.summary.scalar('train mask loss', train_mask_loss.result(), step=step)
tf.summary.scalar('train accuracy', train_acc.result(), step=step)
tf.summary.scalar('train mean IoU', train_meaniou.result(), step=step)
# On epoch end
if epoch_step == TRAINING_PARAMETERS['steps_per_epoch']:
# Save checkpoint (weights, optimizer states)
save_path = manager.save()
print("Saved checkpoint: {}. Loss: {:1.2f}, acc: {:1.2f}, meanIoU: {:1.2f}".format(save_path, train_loss.result(), train_acc.result(), train_meaniou.result()))
# Validation
print("Start validation...")
val_progbar = Progbar(total_val_sample, interval=1, stateful_metrics=['val_acc', 'val_meaniou'])
val_step = 0
for image, cat_true, mask_true in val_dataset:
val_step += 1
total_loss, l_cate, l_mask = test_step(model,
loss_fn,
image,
cat_true,
mask_true,
val_acc,
val_meaniou)
values = [('val_loss', total_loss),
('val_cat_loss', l_cate),
('val_mask_loss', l_mask),
('val_acc', val_acc.result()),
('val_meaniou', val_meaniou.result())]
                val_progbar.update(val_step, values)
val_loss.update_state(total_loss)
val_cat_loss.update_state(l_cate)
val_mask_loss.update_state(l_mask)
with summary_writer.as_default():
tf.summary.scalar('validation loss', val_loss.result(), step=step)
tf.summary.scalar('validation category loss', val_cat_loss.result(), step=step)
tf.summary.scalar('validation mask loss', val_mask_loss.result(), step=step)
tf.summary.scalar('validation accuracy', val_acc.result(), step=step)
tf.summary.scalar('validation mean IoU', val_meaniou.result(), step=step)
# Save new best weight
new_metric = (val_acc.result() + val_meaniou.result()) / 2
if val_metric < new_metric:
val_metric = new_metric
ckpt.metric.assign(new_metric)
weight_path = os.path.join('weights', model.model_name, 'weight_{}_{}_{}_{}_{}_{}_{}_{}.h5'.format(model.model_name, model.num_class, model.input_size, '_'.join([str(i) for i in model.grid_sizes]), model.head_style, model.head_depth, model.fpn_channel, new_metric))
print("Val acc: {}, Val meaniou: {}. Saving weight to {}...".format(val_acc.result(), val_meaniou.result(), weight_path))
model.save_weights(weight_path)
total_val_sample = val_step
# Reset metrics state
train_loss.reset_states()
train_cat_loss.reset_states()
train_mask_loss.reset_states()
val_loss.reset_states()
val_cat_loss.reset_states()
val_mask_loss.reset_states()
train_acc.reset_states()
val_acc.reset_states()
train_meaniou.reset_states()
val_meaniou.reset_states()
train_time = int(time.perf_counter() - start_time)
train_hour = train_time // 3600
train_time = train_time % 3600
train_minute = train_time // 60
train_second = train_time % 60
print("Total training time: {} h {} m {} s".format(train_hour, train_minute, train_second))
| 50.138655 | 281 | 0.611162 |
9be16ad025272fbd89cb45d3e090102853d8024d
| 799 |
py
|
Python
|
algorithms/ar-kmp/python3/knuth_morris_pratt.py
|
NuclearCactus/FOSSALGO
|
eb66f3bdcd6c42c66e8fc7110a32ac021596ca66
|
[
"MIT"
] | 59 |
2018-09-11T17:40:25.000Z
|
2022-03-03T14:40:39.000Z
|
algorithms/ar-kmp/python3/knuth_morris_pratt.py
|
RitvikDayal/FOSSALGO
|
ae225a5fffbd78d0dff83fd7b178ba47bfd7a769
|
[
"MIT"
] | 468 |
2018-08-28T17:04:29.000Z
|
2021-12-03T15:16:34.000Z
|
algorithms/ar-kmp/python3/knuth_morris_pratt.py
|
RitvikDayal/FOSSALGO
|
ae225a5fffbd78d0dff83fd7b178ba47bfd7a769
|
[
"MIT"
] | 253 |
2018-08-28T17:08:51.000Z
|
2021-11-01T12:30:39.000Z
|
#Python program for KMP Algorithm
def LPSArray(pat, M, lps):
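    # lps[i] holds the length of the longest proper prefix of pat[:i+1] that is also
    # a suffix of it; on a mismatch, KMP falls back to this length instead of
    # re-scanning the text from the start of the pattern.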
lenn = 0
i = 1
while i < M:
        if pat[i] == pat[lenn]:
lenn += 1
lps[i] = lenn
i += 1
else:
if lenn != 0:
lenn = lps[lenn-1]
else:
lps[i] = 0
i += 1
def KMP(pat, txt):
M = len(pat)
N = len(txt)
# create lps[] that will hold the longest prefix suffix values for pattern
lps = [0]*M
j = 0
# Preprocess the pattern (calculate lps[] array)
LPSArray(pat, M, lps)
i = 0 # index for txt[]
while i < N:
if pat[j] == txt[i]:
i += 1
j += 1
if j == M:
print ("Found pattern at index " + str(i-j))
j = lps[j-1]
# mismatch after j matches
elif i < N and pat[j] != txt[i]:
if j != 0:
j = lps[j-1]
else:
i += 1
txt = "ABABDABACDABABCABAB"
pat = "ABABCABAB"
KMP(pat, txt)
| 15.98 | 75 | 0.545682 |
500302a6edbc0f78c26797058326cae2f1dd7b5b
| 2,624 |
py
|
Python
|
test/test_npu/test_network_ops/test_dropout.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-12-02T03:07:35.000Z
|
2021-12-02T03:07:35.000Z
|
test/test_npu/test_network_ops/test_dropout.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-11-12T07:23:03.000Z
|
2021-11-12T08:28:13.000Z
|
test/test_npu/test_network_ops/test_dropout.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('..')
import torch
import numpy as np
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestDropOutDoMask(TestCase):
def cpu_op_exec(self, input):
out = torch.nn.Dropout(0.5)(input)
out = out.numpy()
return out
def npu_op_exec(self, input):
out = torch.nn.Dropout(0.5)(input)
out = out.to("cpu")
out = out.numpy()
return out
def dropout_list_exec(self, list):
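        # Dropout zeroes elements at random, so CPU and NPU outputs are compared only
        # at positions that are non-zero in both results, and those must agree within epsilon.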
epsilon = 1e-3
for item in list:
cpu_input1, npu_input1 = create_common_tensor(item, 0, 100)
if cpu_input1.dtype == torch.float16:
cpu_input1 = cpu_input1.to(torch.float32)
cpu_output = self.cpu_op_exec(cpu_input1)
npu_output = self.npu_op_exec(npu_input1)
cpu_output = cpu_output.astype(npu_output.dtype)
            # comparison strategy for this operator's randomized results
for a, b in zip(cpu_output.flatten(), npu_output.flatten()):
if abs(a) > 0 and abs(b) > 0 and abs(a - b) > epsilon:
print(f'input = {item}, ERROR!')
break
else:
print(f'input = {item}, Successfully!')
def test_op_shape_format_fp16(self, device):
format_list = [0, 3, 29]
shape_list = [1, (256, 1280), (32, 3, 3), (256, 2048, 7, 7)]
shape_format = [
[np.float16, i, j] for i in format_list for j in shape_list
]
self.dropout_list_exec(shape_format)
def test_op_shape_format_fp32(self, device):
format_list = [0, 3, 29]
shape_list = [1, (256, 1280), (32, 3, 3), (256, 2048, 7, 7)]
shape_format = [
[np.float32, i, j] for i in format_list for j in shape_list
]
self.dropout_list_exec(shape_format)
instantiate_device_type_tests(TestDropOutDoMask, globals(), except_for="cpu")
if __name__ == "__main__":
run_tests()
| 37.485714 | 77 | 0.640625 |
a844b552f292190f3c5fa040f3621afb025f7afe
| 7,164 |
py
|
Python
|
solutions/block_demo/.utility/python/transymodem.py
|
wstong999/AliOS-Things
|
6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9
|
[
"Apache-2.0"
] | null | null | null |
solutions/block_demo/.utility/python/transymodem.py
|
wstong999/AliOS-Things
|
6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9
|
[
"Apache-2.0"
] | null | null | null |
solutions/block_demo/.utility/python/transymodem.py
|
wstong999/AliOS-Things
|
6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# version 1.0.1
import os
import sys
import re
import codecs
import time
import json
import argparse
import inspect
from ymodemfile import YModemfile
try:
import serial
from serial.tools import miniterm
from serial.tools.list_ports import comports
except:
print("\n\nNot found pyserial, please install: \nsudo pip install pyserial")
sys.exit(0)
def read_json(json_file):
data = None
if os.path.isfile(json_file):
with open(json_file, 'r') as f:
data = json.load(f)
return data
def write_json(json_file, data):
with open(json_file, 'w') as f:
f.write(json.dumps(data, indent=4, separators=(',', ': ')))
def ymodemTrans(serialport, filename):
def sender_getc(size):
return serialport.read(size) or None
def sender_putc(data, timeout=15):
return serialport.write(data)
sender = YModemfile(sender_getc, sender_putc)
sent = sender.send_file(filename)
def send_check_recv_data(serialport, pattern, timeout):
""" receive serial data, and check it with pattern """
matcher = re.compile(pattern)
tic = time.time()
buff = serialport.read(128)
while (time.time() - tic) < timeout:
buff += serialport.read(128)
if matcher.search(buff):
return True
return False
def download_file(portnum, baudrate, filepath):
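    # Flow implemented below: probe the board state with 0x5A bytes, wait for the
    # 'amp shakehand begin' prompt, answer with 0xA5 until acknowledged, then send
    # 'amp_boot' and 'cmd_file_transfer', push the file via YMODEM and finish with 'cmd_exit'.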
# open serial port first
serialport = serial.Serial()
serialport.port = portnum
serialport.baudrate = baudrate
serialport.parity = "N"
serialport.bytesize = 8
serialport.stopbits = 1
serialport.timeout = 0.05
try:
serialport.open()
except Exception as e:
raise Exception("Failed to open serial port: %s!" % portnum)
    # send handshake word to check amp boot mode
mylist = [0xA5]
checkstatuslist = [0x5A]
bmatched = False
shakehand = False
count = 0
reboot_count = 0
# step 1: check system status
for i in range(300):
serialport.write(serial.to_bytes(checkstatuslist))
time.sleep(0.1)
buff = serialport.read(2)
print(buff)
        # case 1: echo received, so the board is in cli or repl mode
if((buff) == b'Z'):
# print('Read data OK');
reboot_count += 1
else:
            # no echo means the board is in running mode, not cli/repl mode
print("Please reboot the board manually.")
break
if(reboot_count >= 4):
# need reboot system
print("Please reboot the board manually.")
break
# step 2: wait reboot and hand shakend cmd
time.sleep(1)
bmatched = send_check_recv_data(serialport, b'amp shakehand begin...', 10)
# print(buff)
if bmatched:
print('amp shakehand begin...')
for i in range(300):
serialport.write(serial.to_bytes(mylist))
time.sleep(0.1)
buff = serialport.read(2)
print(buff)
if((buff) == b'Z'):
# print('Read data OK');
count += 1
if(count >= 4):
shakehand = True
if shakehand:
break
if i > 5:
print("Please reboot the board manually.")
break
else:
print("Please reboot the board manually, and try it again.")
serialport.close()
return
# start send amp boot cmd
time.sleep(0.1)
print("start to send amp_boot cmd")
cmd = 'amp_boot'
serialport.write(cmd.encode())
# serialport.write(b'amp_boot')
# send file transfer cmd
time.sleep(0.1)
# print("start to send file cmd")
# cmd = 'cmd_file_transfer\n'
# serialport.write(cmd.encode())
bmatched = send_check_recv_data(serialport, b'amp shakehand success', 2)
# serialport.write(b'cmd_flash_js\n')
# send file
if bmatched:
print("start to send file cmd")
cmd = 'cmd_file_transfer\n'
serialport.write(cmd.encode())
print('amp shakehand success')
time.sleep(0.1)
ymodemTrans(serialport, filepath)
print("Ymodem transfer file finish")
# send file transfer cmd
time.sleep(0.1)
print("send cmd exit")
cmd = 'cmd_exit\n'
serialport.write(cmd.encode())
else:
        print('amp shakehand failed, please reboot the board manually')
# close serialport
serialport.close()
def get_downloadconfig():
""" get configuration from .config_burn file, if it is not existed,
generate default configuration of chip_haas1000 """
configs = {}
configs['chip_haas1000'] = {}
configs['chip_haas1000']['serialport'] = ''
configs['chip_haas1000']['baudrate'] = ''
configs['chip_haas1000']['filepath'] = ''
return configs['chip_haas1000']
def main2():
cmd_parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='''Run and transfer file to system.''',)
cmd_parser.add_argument('-d', '--device', default='',
help='the serial device or the IP address of the pyboard')
cmd_parser.add_argument(
'-b', '--baudrate', default=115200, help='the baud rate of the serial device')
cmd_parser.add_argument('files', nargs='*', help='input transfer files')
args = cmd_parser.parse_args()
print(args)
# download file
# step 1: set config
downloadconfig = get_downloadconfig()
# step 2: get serial port
if not downloadconfig["serialport"]:
downloadconfig["serialport"] = args.device
if not downloadconfig["serialport"]:
downloadconfig["serialport"] = miniterm.ask_for_port()
if not downloadconfig["serialport"]:
print("no specified serial port")
return
else:
needsave = True
# step 3: get baudrate
if not downloadconfig["baudrate"]:
downloadconfig["baudrate"] = args.baudrate
if not downloadconfig["baudrate"]:
downloadconfig["baudrate"] = "115200"
# step 4: get transfer file
if not downloadconfig["filepath"]:
downloadconfig["filepath"] = args.files
if not downloadconfig["filepath"]:
print('no file wait to transfer')
return
if os.path.isabs("".join(downloadconfig["filepath"])):
filepath = "".join(downloadconfig["filepath"])
print('the filepath is abs path')
else:
basepath = os.path.abspath('.')
filepath = basepath + '/' + "".join(downloadconfig["filepath"])
print('the filepath is not abs path')
print("serial port is %s" % downloadconfig["serialport"])
print("transfer baudrate is %s" % downloadconfig["baudrate"])
# print(base_path(downloadconfig["filepath"]))
print("filepath is %s" % filepath)
# print("the settings were restored in the file %s" % os.path.join(os.getcwd(), '.config_burn'))
# step 3: download file
download_file(downloadconfig["serialport"],
downloadconfig['baudrate'], filepath)
if __name__ == "__main__":
main2()
| 28.887097 | 100 | 0.61139 |
a85d442ed83636a731ffbcfcd4c75ba8be7db01f
| 6,710 |
py
|
Python
|
src/onegov/swissvotes/views/votes.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/swissvotes/views/votes.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/swissvotes/views/votes.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from morepath.request import Response
from onegov.core.security import Private
from onegov.core.security import Public
from onegov.core.security import Secret
from onegov.form import Form
from onegov.swissvotes import _
from onegov.swissvotes import SwissvotesApp
from onegov.swissvotes.collections import SwissVoteCollection
from onegov.swissvotes.external_resources import MfgPosters
from onegov.swissvotes.external_resources import SaPosters
from onegov.swissvotes.forms import SearchForm
from onegov.swissvotes.forms import UpdateDatasetForm
from onegov.swissvotes.forms import UpdateExternalResourcesForm
from onegov.swissvotes.forms import UpdateMetadataForm
from onegov.swissvotes.layouts import DeleteVotesLayout
from onegov.swissvotes.layouts import UpdateExternalResourcesLayout
from onegov.swissvotes.layouts import UpdateMetadataLayout
from onegov.swissvotes.layouts import UpdateVotesLayout
from onegov.swissvotes.layouts import VotesLayout
from translationstring import TranslationString
@SwissvotesApp.form(
model=SwissVoteCollection,
permission=Public,
form=SearchForm,
template='votes.pt'
)
def view_votes(self, request, form):
if not form.errors:
form.apply_model(self)
return {
'layout': VotesLayout(self, request),
'form': form
}
@SwissvotesApp.form(
model=SwissVoteCollection,
permission=Private,
form=UpdateDatasetForm,
template='form.pt',
name='update'
)
def update_votes(self, request, form):
self = self.default()
layout = UpdateVotesLayout(self, request)
if form.submitted(request):
added, updated = self.update(form.dataset.data)
request.message(
_(
"Dataset updated (${added} added, ${updated} updated)",
mapping={'added': added, 'updated': updated}
),
'success'
)
# Warn if descriptor labels are missing
missing = set()
for vote in self.query():
for policy_area in vote.policy_areas:
missing |= set(
path for path in policy_area.label_path
if not isinstance(path, TranslationString)
)
if missing:
request.message(
_(
"The dataset contains unknown descriptors: ${items}.",
mapping={'items': ', '.join(sorted(missing))}
),
'warning'
)
return request.redirect(layout.votes_url)
return {
'layout': layout,
'form': form,
'cancel': request.link(self),
'button_text': _("Update"),
}
@SwissvotesApp.form(
model=SwissVoteCollection,
permission=Private,
form=UpdateMetadataForm,
template='form.pt',
name='update-metadata'
)
def update_metadata(self, request, form):
self = self.default()
layout = UpdateMetadataLayout(self, request)
if form.submitted(request):
added, updated = self.update_metadata(form.metadata.data)
request.message(
_(
"Metadata updated (${added} added, ${updated} updated)",
mapping={'added': added, 'updated': updated}
),
'success'
)
return request.redirect(layout.votes_url)
return {
'layout': layout,
'form': form,
'cancel': request.link(self),
'button_text': _("Update"),
}
@SwissvotesApp.form(
model=SwissVoteCollection,
permission=Private,
form=UpdateExternalResourcesForm,
template='form.pt',
name='update-external-resources'
)
def update_external_resources(self, request, form):
self = self.default()
layout = UpdateExternalResourcesLayout(self, request)
if form.submitted(request):
added_total = 0
updated_total = 0
removed_total = 0
failed_total = set()
for resource, cls in (
('mfg', MfgPosters(request.app.mfg_api_token)),
('sa', SaPosters())
):
if resource in form.resources.data:
added, updated, removed, failed = cls.fetch(request.session)
added_total += added
updated_total += updated
removed_total += removed
failed_total |= failed
request.message(
_(
'External resources updated (${added} added, '
'${updated} updated, ${removed} removed)',
mapping={
'added': added_total,
'updated': updated_total,
'removed': removed_total
}
),
'success'
)
if failed_total:
failed_total = ', '.join((
layout.format_bfs_number(item) for item in sorted(failed_total)
))
request.message(
_(
'Some external resources could not be updated: ${failed}',
mapping={'failed': failed_total}
),
'warning'
)
return request.redirect(layout.votes_url)
return {
'layout': layout,
'form': form,
'cancel': request.link(self),
'button_text': _("Update external resources"),
}
@SwissvotesApp.view(
model=SwissVoteCollection,
permission=Public,
name='csv'
)
def export_votes_csv(self, request):
return Response(
request.app.get_cached_dataset('csv'),
content_type='text/csv',
content_disposition='inline; filename=dataset.csv'
)
@SwissvotesApp.view(
model=SwissVoteCollection,
permission=Public,
name='xlsx'
)
def export_votes_xlsx(self, request):
return Response(
request.app.get_cached_dataset('xlsx'),
content_type=(
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
),
content_disposition='inline; filename=dataset.xlsx'
)
@SwissvotesApp.form(
model=SwissVoteCollection,
permission=Secret,
form=Form,
template='form.pt',
name='delete'
)
def delete_votes(self, request, form):
self = self.default()
layout = DeleteVotesLayout(self, request)
if form.submitted(request):
for vote in self.query():
request.session.delete(vote)
request.message(_("All votes deleted"), 'success')
return request.redirect(layout.votes_url)
return {
'layout': layout,
'form': form,
'message': _("Do you really want to delete all votes?!"),
'button_text': _("Delete"),
'button_class': 'alert',
'cancel': request.link(self)
}
| 28.432203 | 79 | 0.609836 |
766491d3189d2ce4581c010a835c2c7cde8bdabf
| 12,034 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/healthcare/doctype/lab_test/lab_test.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/healthcare/doctype/lab_test/lab_test.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/healthcare/doctype/lab_test/lab_test.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, ESS and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
import json
from frappe.utils import getdate
from erpnext.healthcare.doctype.healthcare_settings.healthcare_settings import get_receivable_account
from frappe import _
class LabTest(Document):
def on_submit(self):
frappe.db.set_value(self.doctype,self.name,"submitted_date", getdate())
insert_lab_test_to_medical_record(self)
frappe.db.set_value("Lab Test", self.name, "status", "Completed")
def on_cancel(self):
delete_lab_test_from_medical_record(self)
frappe.db.set_value("Lab Test", self.name, "status", "Cancelled")
self.reload()
def on_update(self):
if(self.sensitivity_test_items):
sensitivity = sorted(self.sensitivity_test_items, key=lambda x: x.antibiotic_sensitivity)
for i, item in enumerate(sensitivity):
item.idx = i+1
self.sensitivity_test_items = sensitivity
def after_insert(self):
if(self.prescription):
frappe.db.set_value("Lab Prescription", self.prescription, "test_created", 1)
if not self.test_name and self.template:
self.load_test_from_template()
self.reload()
def load_test_from_template(self):
lab_test = self
create_test_from_template(lab_test)
self.reload()
def create_test_from_template(lab_test):
template = frappe.get_doc("Lab Test Template", lab_test.template)
patient = frappe.get_doc("Patient", lab_test.patient)
lab_test.test_name = template.test_name
lab_test.result_date = getdate()
lab_test.department = template.department
lab_test.test_group = template.test_group
lab_test = create_sample_collection(lab_test, template, patient, None)
lab_test = load_result_format(lab_test, template, None, None)
@frappe.whitelist()
def update_status(status, name):
frappe.db.sql("""update `tabLab Test` set status=%s, approved_date=%s where name = %s""", (status, getdate(), name))
@frappe.whitelist()
def update_lab_test_print_sms_email_status(print_sms_email, name):
frappe.db.set_value("Lab Test",name,print_sms_email,1)
def create_lab_test_doc(invoice, consultation, patient, template):
#create Test Result for template, copy vals from Invoice
lab_test = frappe.new_doc("Lab Test")
if(invoice):
lab_test.invoice = invoice
if(consultation):
lab_test.physician = consultation.physician
lab_test.patient = patient.name
lab_test.patient_age = patient.get_age()
lab_test.patient_sex = patient.sex
lab_test.email = patient.email
lab_test.mobile = patient.mobile
lab_test.department = template.department
lab_test.test_name = template.test_name
lab_test.template = template.name
lab_test.test_group = template.test_group
lab_test.result_date = getdate()
lab_test.report_preference = patient.report_preference
return lab_test
def create_normals(template, lab_test):
lab_test.normal_toggle = "1"
normal = lab_test.append("normal_test_items")
normal.test_name = template.test_name
normal.test_uom = template.test_uom
normal.normal_range = template.test_normal_range
normal.require_result_value = 1
normal.template = template.name
def create_compounds(template, lab_test, is_group):
lab_test.normal_toggle = "1"
for normal_test_template in template.normal_test_templates:
normal = lab_test.append("normal_test_items")
if is_group:
normal.test_event = normal_test_template.test_event
else:
normal.test_name = normal_test_template.test_event
normal.test_uom = normal_test_template.test_uom
normal.normal_range = normal_test_template.normal_range
normal.require_result_value = 1
normal.template = template.name
def create_specials(template, lab_test):
lab_test.special_toggle = "1"
if(template.sensitivity):
lab_test.sensitivity_toggle = "1"
for special_test_template in template.special_test_template:
special = lab_test.append("special_test_items")
special.test_particulars = special_test_template.particulars
special.require_result_value = 1
special.template = template.name
def create_sample_doc(template, patient, invoice):
if(template.sample):
sample_exist = frappe.db.exists({
"doctype": "Sample Collection",
"patient": patient.name,
"docstatus": 0,
"sample": template.sample})
if sample_exist :
#Update Sample Collection by adding quantity
sample_collection = frappe.get_doc("Sample Collection",sample_exist[0][0])
quantity = int(sample_collection.sample_quantity)+int(template.sample_quantity)
if(template.sample_collection_details):
				sample_collection_details = sample_collection.sample_collection_details+"\n==============\n"+"Test :"+template.test_name+"\n"+"Collection Details:\n\t"+template.sample_collection_details
frappe.db.set_value("Sample Collection", sample_collection.name, "sample_collection_details",sample_collection_details)
frappe.db.set_value("Sample Collection", sample_collection.name, "sample_quantity",quantity)
else:
#create Sample Collection for template, copy vals from Invoice
sample_collection = frappe.new_doc("Sample Collection")
if(invoice):
sample_collection.invoice = invoice
sample_collection.patient = patient.name
sample_collection.patient_age = patient.get_age()
sample_collection.patient_sex = patient.sex
sample_collection.sample = template.sample
sample_collection.sample_uom = template.sample_uom
sample_collection.sample_quantity = template.sample_quantity
if(template.sample_collection_details):
				sample_collection.sample_collection_details = "Test :"+template.test_name+"\n"+"Collection Details:\n\t"+template.sample_collection_details
sample_collection.save(ignore_permissions=True)
return sample_collection
@frappe.whitelist()
def create_lab_test_from_desk(patient, template, prescription, invoice=None):
lab_test_exist = frappe.db.exists({
"doctype": "Lab Test",
"prescription": prescription
})
if lab_test_exist:
return
template = frappe.get_doc("Lab Test Template", template)
#skip the loop if there is no test_template for Item
if not (template):
return
patient = frappe.get_doc("Patient", patient)
consultation_id = frappe.get_value("Lab Prescription", prescription, "parent")
consultation = frappe.get_doc("Consultation", consultation_id)
lab_test = create_lab_test(patient, template, prescription, consultation, invoice)
return lab_test.name
def create_sample_collection(lab_test, template, patient, invoice):
if(frappe.db.get_value("Healthcare Settings", None, "require_sample_collection") == "1"):
sample_collection = create_sample_doc(template, patient, invoice)
if(sample_collection):
lab_test.sample = sample_collection.name
return lab_test
def load_result_format(lab_test, template, prescription, invoice):
if(template.test_template_type == 'Single'):
create_normals(template, lab_test)
elif(template.test_template_type == 'Compound'):
create_compounds(template, lab_test, False)
elif(template.test_template_type == 'Descriptive'):
create_specials(template, lab_test)
elif(template.test_template_type == 'Grouped'):
#iterate for each template in the group and create one result for all.
for test_group in template.test_groups:
#template_in_group = None
if(test_group.test_template):
template_in_group = frappe.get_doc("Lab Test Template",
test_group.test_template)
if(template_in_group):
if(template_in_group.test_template_type == 'Single'):
create_normals(template_in_group, lab_test)
elif(template_in_group.test_template_type == 'Compound'):
normal_heading = lab_test.append("normal_test_items")
normal_heading.test_name = template_in_group.test_name
normal_heading.require_result_value = 0
normal_heading.template = template_in_group.name
create_compounds(template_in_group, lab_test, True)
elif(template_in_group.test_template_type == 'Descriptive'):
special_heading = lab_test.append("special_test_items")
special_heading.test_name = template_in_group.test_name
special_heading.require_result_value = 0
special_heading.template = template_in_group.name
create_specials(template_in_group, lab_test)
else:
normal = lab_test.append("normal_test_items")
normal.test_name = test_group.group_event
normal.test_uom = test_group.group_test_uom
normal.normal_range = test_group.group_test_normal_range
normal.require_result_value = 1
normal.template = template.name
if(template.test_template_type != 'No Result'):
if(prescription):
lab_test.prescription = prescription
if(invoice):
frappe.db.set_value("Lab Prescription", prescription, "invoice", invoice)
lab_test.save(ignore_permissions=True) # insert the result
return lab_test
def create_lab_test(patient, template, prescription, consultation, invoice):
lab_test = create_lab_test_doc(invoice, consultation, patient, template)
lab_test = create_sample_collection(lab_test, template, patient, invoice)
lab_test = load_result_format(lab_test, template, prescription, invoice)
return lab_test
@frappe.whitelist()
def get_employee_by_user_id(user_id):
emp_id = frappe.db.get_value("Employee",{"user_id":user_id})
employee = frappe.get_doc("Employee",emp_id)
return employee
def insert_lab_test_to_medical_record(doc):
subject = str(doc.test_name)
if(doc.test_comment):
subject += ", "+str(doc.test_comment)
medical_record = frappe.new_doc("Patient Medical Record")
medical_record.patient = doc.patient
medical_record.subject = subject
medical_record.status = "Open"
medical_record.communication_date = doc.result_date
medical_record.reference_doctype = "Lab Test"
medical_record.reference_name = doc.name
medical_record.reference_owner = doc.owner
medical_record.save(ignore_permissions=True)
def delete_lab_test_from_medical_record(self):
medical_record_id = frappe.db.sql("select name from `tabPatient Medical Record` where reference_name=%s",(self.name))
if(medical_record_id[0][0]):
frappe.delete_doc("Patient Medical Record", medical_record_id[0][0])
def create_item_line(test_code, sales_invoice):
if test_code:
item = frappe.get_doc("Item", test_code)
if item:
if not item.disabled:
sales_invoice_line = sales_invoice.append("items")
sales_invoice_line.item_code = item.item_code
sales_invoice_line.item_name = item.item_name
sales_invoice_line.qty = 1.0
sales_invoice_line.description = item.description
@frappe.whitelist()
def create_invoice(company, patient, lab_tests, prescriptions):
test_ids = json.loads(lab_tests)
line_ids = json.loads(prescriptions)
if not test_ids and not line_ids:
return
sales_invoice = frappe.new_doc("Sales Invoice")
sales_invoice.customer = frappe.get_value("Patient", patient, "customer")
sales_invoice.due_date = getdate()
sales_invoice.is_pos = '0'
sales_invoice.debit_to = get_receivable_account(company)
for line in line_ids:
test_code = frappe.get_value("Lab Prescription", line, "test_code")
create_item_line(test_code, sales_invoice)
for test in test_ids:
template = frappe.get_value("Lab Test", test, "template")
test_code = frappe.get_value("Lab Test Template", template, "item")
create_item_line(test_code, sales_invoice)
sales_invoice.set_missing_values()
sales_invoice.save()
#set invoice in lab test
for test in test_ids:
frappe.db.set_value("Lab Test", test, "invoice", sales_invoice.name)
prescription = frappe.db.get_value("Lab Test", test, "prescription")
if prescription:
frappe.db.set_value("Lab Prescription", prescription, "invoice", sales_invoice.name)
#set invoice in prescription
for line in line_ids:
frappe.db.set_value("Lab Prescription", line, "invoice", sales_invoice.name)
return sales_invoice.name
@frappe.whitelist()
def get_lab_test_prescribed(patient):
return frappe.db.sql("""select cp.name, cp.test_code, cp.parent, cp.invoice, ct.physician, ct.consultation_date from tabConsultation ct,
`tabLab Prescription` cp where ct.patient=%s and cp.parent=ct.name and cp.test_created=0""", (patient))
| 40.655405 | 190 | 0.781951 |
4f933cca9a376532a3bc93f78b79788387ab7bbc
| 7,565 |
py
|
Python
|
GZP_GTO_ArcMap/scripts/SCR_PFLICHT_Layer.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 3 |
2019-06-18T15:28:09.000Z
|
2019-07-11T07:31:45.000Z
|
GZP_GTO_ArcMap/scripts/SCR_PFLICHT_Layer.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 2 |
2019-07-11T14:03:25.000Z
|
2021-02-08T16:14:04.000Z
|
GZP_GTO_ArcMap/scripts/SCR_PFLICHT_Layer.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 1 |
2019-06-12T11:07:37.000Z
|
2019-06-12T11:07:37.000Z
|
# -*- coding: utf-8 -*-
"""
@author: ms.gis, June 2020
Script for ArcGIS GTO for Modul GZP
"""
##
import arcpy
import pythonaddins
## -------------------------
# Open progress dialog
with pythonaddins.ProgressDialog as dialog:
dialog.title = "PRUEFUNG PFLICHTDATENSAETZE"
dialog.description = "Pruefe Pflichtdatensaetze ... Bitte warten..."
dialog.animation = "Spiral"
# --- Identify compulsory layers without entries/ features ---
# Create List for Message Content
lyrList = []
countBGef = 0
countObj = 0
# domainvalues of DOM_FG_V_KLASSE and DOM_WT_T_KLASSE
list_NOE_STM = { 1, 2, 3, 4, 5, 6, 7, 8, 9} # BWV regional NOE_STM
list_B_O_K_S_T_V_W = {10, 11, 12, 13, 14, 15, 16, 17, 18, 19} # BWV regional B_O_K_ST_V_W
list_AT_2021 = {20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30} # BWV national AT_2021
# Access current map document
mxd = arcpy.mapping.MapDocument("CURRENT")
# --- Check TABLES
# Clear all previous selections
for tbl in arcpy.mapping.ListTableViews(mxd):
arcpy.SelectLayerByAttribute_management(tbl.name, "CLEAR_SELECTION")
# Query tables
for tbl in arcpy.mapping.ListTableViews(mxd):
tblSrcName = tbl.datasetName
if tblSrcName in ["TBGEN", "TBGGN", "TBGZP", "TBPRJ"]:
result = arcpy.GetCount_management(tbl)
count = int(result.getOutput(0))
if count == 0:
lyrList.append(tblSrcName)
# --- Check FEATURE LAYERS
# Clear all previous selections
for lyr in arcpy.mapping.ListLayers(mxd):
if lyr.isFeatureLayer:
arcpy.SelectLayerByAttribute_management(lyr.name, "CLEAR_SELECTION")
# Eliminate multiple listed layers in TOC
lyr_set = set()
for feat in arcpy.mapping.ListLayers(mxd):
if feat.isFeatureLayer:
lyr_set.add((feat.datasetName, feat))
# Query tables
for (lyrSrcName, lyr) in sorted(lyr_set):
if lyrSrcName in ["FLUSS", "GSCHUTZ", "LPAKT", "MODEL", "PLGBT"]:
result = arcpy.GetCount_management(lyr)
count = int(result.getOutput(0))
if count == 0:
lyrList.append(lyrSrcName)
elif lyrSrcName == "BWERT":
listKat = []
with arcpy.da.SearchCursor(lyr, ["SZENARIO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that all szenarios (30, 100, 300) present
if not {30, 100, 300}.issubset(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName == "FG":
listKat = set()
with arcpy.da.SearchCursor(lyr, ["V_KLASSE"]) as cursor:
for row in cursor:
listKat.add(row[0])
# check if listKat are in only one group available
is_valid = False
if listKat.issubset(list_NOE_STM):
is_valid = True
if listKat.issubset(list_B_O_K_S_T_V_W):
is_valid = True
if listKat.issubset(list_AT_2021):
is_valid = True
if not is_valid:
lyrList.append(lyrSrcName)
elif lyrSrcName == "FUNKT":
listKat = []
with arcpy.da.SearchCursor(lyr, ["L_KATEGO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that category "Rot-Gelb-schraffierter Funktionsbereich" (1) present
if 1 not in set(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName in ["GFPKT", "GFLIN", "GFFLA"]:
result = arcpy.GetCount_management(lyr)
countBGef += int(result.getOutput(0))
elif lyrSrcName == "GPLBAU":
listKat = []
with arcpy.da.SearchCursor(lyr, ["L_KATEGO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that category "beplant od. verbaut" (1) present
if 1 not in set(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName == "GZ100":
# Access unfiltered source layer
SrcLayer = lyr.dataSource
listKat = []
with arcpy.da.SearchCursor(SrcLayer, ["L_KATEGO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that categories (1, 2) present
if not {1, 2}.issubset(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName == "GZ300":
listKat = []
with arcpy.da.SearchCursor(lyr, ["L_KATEGO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that category "Gelb-schraffierte Zone" (2) present
if 2 not in set(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName == "KNTPKT":
listKat = []
with arcpy.da.SearchCursor(lyr, ["SZENARIO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that all szenarios (30, 100, 300) present
if not {30, 100, 300}.issubset(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName in ["OBPKT", "OBLIN", "OBFLA"]:
result = arcpy.GetCount_management(lyr)
countObj += int(result.getOutput(0))
elif lyrSrcName == "QPLIN":
listKat = []
with arcpy.da.SearchCursor(lyr, ["L_KATEGO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that at least categories 1 & 2 present
if not {1,2}.issubset(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName in ["UFHQN", "UFHQNLIN"]:
listKat = []
with arcpy.da.SearchCursor(lyr, ["L_KATEGO"]) as cursor:
for row in cursor:
listKat.append(row[0])
# Check that all scenario categories (1,2,3) present
if not {1, 2, 3}.issubset(listKat):
lyrList.append(lyrSrcName)
elif lyrSrcName == "WT":
listKat = set()
with arcpy.da.SearchCursor(lyr, ["T_KLASSE"]) as cursor:
for row in cursor:
listKat.add(row[0])
# check if listKat are in only one group available
is_valid = False
if listKat.issubset(list_NOE_STM):
is_valid = True
if listKat.issubset(list_B_O_K_S_T_V_W):
is_valid = True
if listKat.issubset(list_AT_2021):
is_valid = True
if not is_valid:
lyrList.append(lyrSrcName)
# Test if at least one feature of Besondere Gefährdungen or Objekte present
if countBGef == 0:
lyrList.append("GFPKT, GFLIN oder GFFLA")
if countObj == 0:
lyrList.append("OBPKT, OBLIN oder OBFLA")
##
MessageContent = ""
for l in lyrList:
MessageContent += "\n{}".format(l)
##
# Define Message
if len(lyrList) == 0:
pythonaddins.MessageBox("Pruefung erfolgreich.\nAlle Pflichtdatensaetze befuellt.", "INFORMATION", 0)
else:
MessageFinal = "Folgende Pflichtdatensaetze sind nicht (ausreichend) befuellt:\n" + MessageContent + "\n\nBitte korrigieren! \n"
pythonaddins.MessageBox(MessageFinal, "FEHLERMELDUNG", 0)
del lyrList
| 35.186047 | 132 | 0.556642 |
8c31138dac71ba403f727368ec698d659c9472d2
| 724 |
py
|
Python
|
Contrib-Inspur/openbmc/poky/meta/lib/oeqa/core/decorator/oetag.py
|
opencomputeproject/Rack-Manager
|
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
|
[
"MIT"
] | 5 |
2019-11-11T07:57:26.000Z
|
2022-03-28T08:26:53.000Z
|
Contrib-Inspur/openbmc/poky/meta/lib/oeqa/core/decorator/oetag.py
|
opencomputeproject/Rack-Manager
|
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
|
[
"MIT"
] | 3 |
2019-09-05T21:47:07.000Z
|
2019-09-17T18:10:45.000Z
|
Contrib-Inspur/openbmc/poky/meta/lib/oeqa/core/decorator/oetag.py
|
opencomputeproject/Rack-Manager
|
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
|
[
"MIT"
] | 11 |
2019-07-20T00:16:32.000Z
|
2022-01-11T14:17:48.000Z
|
#
# Copyright (C) 2016 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
from . import OETestFilter, registerDecorator
from oeqa.core.utils.misc import strToList
def _tagFilter(tags, filters):
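    # True means the case carries none of the requested filter tags, i.e. it should be filtered out.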
return False if set(tags) & set(filters) else True
@registerDecorator
class OETestTag(OETestFilter):
attrs = ('oetag',)
def bind(self, registry, case):
super(OETestTag, self).bind(registry, case)
self.oetag = strToList(self.oetag, 'oetag')
def filtrate(self, filters):
if filters.get('oetag'):
filterx = strToList(filters['oetag'], 'oetag')
del filters['oetag']
if _tagFilter(self.oetag, filterx):
return True
return False
| 25.857143 | 58 | 0.64779 |
50732a571e7539bcf8baed6e3621eb01035feddf
| 494 |
py
|
Python
|
transonic/_version.py
|
fluiddyn/transonic
|
a460e9f6d1139f79b668cb3306d1e8a7e190b72d
|
[
"BSD-3-Clause"
] | 88 |
2019-01-08T16:39:08.000Z
|
2022-02-06T14:19:23.000Z
|
transonic/_version.py
|
fluiddyn/transonic
|
a460e9f6d1139f79b668cb3306d1e8a7e190b72d
|
[
"BSD-3-Clause"
] | 13 |
2019-06-20T15:53:10.000Z
|
2021-02-09T11:03:29.000Z
|
transonic/_version.py
|
fluiddyn/transonic
|
a460e9f6d1139f79b668cb3306d1e8a7e190b72d
|
[
"BSD-3-Clause"
] | 1 |
2019-11-05T03:03:14.000Z
|
2019-11-05T03:03:14.000Z
|
__version__ = "0.4.11"
try:
from pyfiglet import figlet_format
__about__ = figlet_format("transonic", font="big")
except ImportError:
__about__ = r"""
_ _
| | (_)
| |_ _ __ __ _ _ __ ___ ___ _ __ _ ___
| __| '__/ _` | '_ \/ __|/ _ \| '_ \| |/ __|
| |_| | | (_| | | | \__ \ (_) | | | | | (__
\__|_| \__,_|_| |_|___/\___/|_| |_|_|\___|
"""
__about__ = __about__.rstrip() + f"{17 * ' '} v. {__version__}\n"
| 26 | 65 | 0.44332 |
50dab888c31c96d59e83e825951242229f8cf7db
| 1,316 |
py
|
Python
|
app/username/views.py
|
IoTServ/FlaskSimpleCMS
|
db0fc4464c6d514db14972156ca3e002a60a4876
|
[
"MIT"
] | null | null | null |
app/username/views.py
|
IoTServ/FlaskSimpleCMS
|
db0fc4464c6d514db14972156ca3e002a60a4876
|
[
"MIT"
] | 4 |
2020-08-29T16:11:12.000Z
|
2022-03-12T00:47:03.000Z
|
app/username/views.py
|
IoTServ/FlaskSimpleCMS
|
db0fc4464c6d514db14972156ca3e002a60a4876
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from StringIO import StringIO
from flask import send_file,redirect,url_for,flash
from . import username
from flask import render_template,request
from flask_login import login_required
from ..models import User,Article
@username.route('/<int:id>')
def detials(id):
user=User.query.get_or_404(id)
if user.confirmed==False:
flash('用户未确认邮箱!','danger')
return redirect(url_for('main.index'))
if user.banded==True:
flash('用户由于某种原因处于禁止状态!','danger')
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
pagination = Article.query.filter_by(author_id=id).order_by(Article.update_time.desc()).paginate(
page, per_page=3,
error_out=False)
articles = pagination.items
return render_template('username/user_info.html', user=user, articles=articles,
pagination=pagination, endpoint='.detials',id=id)
@username.route('/qrcode/<int:id>')
def qrcode(id):
import qrcode
img = qrcode.make("http://www.jiakaozuche.com/zhuye/"+str(id))
#img.save("./test.png")
return _serve_pil_image(img)
def _serve_pil_image(pil_img):
img_io = StringIO()
pil_img.save(img_io, 'PNG')
img_io.seek(0)
return send_file(img_io, mimetype='image/png', cache_timeout=0)
| 32.097561 | 101 | 0.68997 |
ba2119f32355417c2644809bb5d6b273bb820282
| 1,876 |
py
|
Python
|
ppyt/decorators.py
|
yusukemurayama/ppytrading
|
9804d0de870d77bf8a1c847736a636b1342d4600
|
[
"MIT"
] | 4 |
2016-08-16T07:47:15.000Z
|
2017-12-11T10:08:47.000Z
|
ppyt/decorators.py
|
yusukemurayama/ppytrading
|
9804d0de870d77bf8a1c847736a636b1342d4600
|
[
"MIT"
] | null | null | null |
ppyt/decorators.py
|
yusukemurayama/ppytrading
|
9804d0de870d77bf8a1c847736a636b1342d4600
|
[
"MIT"
] | 2 |
2018-06-15T04:43:15.000Z
|
2020-05-02T07:47:15.000Z
|
# coding: utf-8
import logging
from functools import wraps
from ppyt.exceptions import NoDataError
logger = logging.getLogger(__name__)
def handle_nodataerror(nodata_return):
"""NoDataErrorを処理するデコレータです。
このデコレータをつけておくと、内部でNoDataErrorが発生したときに[nodata_return]が返るようになります。
Args:
nodata_return: NoDataError発生時に返る値
Retusn:
関数・メソッドの実行結果
※関数・メソッドでNoDataErrorが発生したら、nodata_returnが返ります。
"""
def wrapper(func):
@wraps(func)
def inner(*args, **kwds):
try:
return func(*args, **kwds)
except NoDataError:
                # If NoDataError was raised, return nodata_return instead.
return nodata_return
return inner
return wrapper
class cached_property(object):
"""プロパティの値をキャッシュします。それにより、2回目以降のアクセス時の負荷を下げます。
評価されたプロパティの結果は、そのプロパティが定義されているインスタンス自身に格納されます。"""
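    # Hypothetical usage:
    #
    #     class Report:
    #         @cached_property
    #         def rows(self):
    #             return run_expensive_query()  # evaluated once per instance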
def __init__(self, func):
"""コンストラクタ
Args:
func: cache_propertyでデコレートされたメソッド
※cached_propertyをつけたときは、プロパティのように
()なしでメソッドが走るようになります。
"""
self._func = func
def __get__(self, obj, klass):
        # Fetch the dict attribute used as a cache (via cache_key) from the instance the property lives on.
        cache_key = '__CACHED_PROPERTY_DICT'  # name of the instance attribute holding cached data
cache = getattr(obj, cache_key, None)
if cache is None:
            # If the dict attribute does not exist yet, attach it to the instance.
cache = {}
setattr(obj, cache_key, cache)
        propname = self._func.__name__  # name of the property
if propname not in cache:
            # Not cached yet: run the method and cache its result.
cache[propname] = self._func(obj)
logger.debug('propname[{}]をキャッシュしました。'.format(propname))
else:
            # Cache hit: just log it.
logger.debug('propname[{}]をキャッシュから取得します。'.format(propname))
return cache[propname]
| 28 | 71 | 0.632196 |
e8b1cc345188ec2513c3e78f2624627d00892d95
| 3,585 |
py
|
Python
|
sentence_parser.py
|
hch-NLP/LTP
|
4eaba8d33c20127a5cf75e17c6bbcc62574dcfb1
|
[
"Apache-2.0"
] | 1 |
2020-11-23T05:04:18.000Z
|
2020-11-23T05:04:18.000Z
|
sentence_parser.py
|
hch-NLP/LTP
|
4eaba8d33c20127a5cf75e17c6bbcc62574dcfb1
|
[
"Apache-2.0"
] | null | null | null |
sentence_parser.py
|
hch-NLP/LTP
|
4eaba8d33c20127a5cf75e17c6bbcc62574dcfb1
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# coding: utf-8
# File: sentence_parser.py
# Author: HKH<[email protected]>
# Date: 18-3-10
import os
from pyltp import Segmentor, Postagger, Parser, NamedEntityRecognizer, SementicRoleLabeller
class LtpParser:
def __init__(self):
LTP_DIR = "F:\\LTP\\ltp_data_v3.4.0\\"
self.segmentor = Segmentor()
self.segmentor.load(os.path.join(LTP_DIR, "cws.model"))
self.postagger = Postagger()
self.postagger.load(os.path.join(LTP_DIR, "pos.model"))
self.parser = Parser()
self.parser.load(os.path.join(LTP_DIR, "parser.model"))
self.recognizer = NamedEntityRecognizer()
self.recognizer.load(os.path.join(LTP_DIR, "ner.model"))
self.labeller = SementicRoleLabeller()
self.labeller.load(os.path.join(LTP_DIR, 'pisrl_win.model'))
def release(self):
self.segmentor.release()
self.postagger.release()
self.recognizer.release()
self.parser.release()
self.labeller.release()
    '''Semantic role labeling'''
def format_labelrole(self, words, postags):
arcs = self.parser.parse(words, postags)
roles = self.labeller.label(words, postags, arcs)
roles_dict = {}
for role in roles:
roles_dict[role.index] = {arg.name:[arg.name,arg.range.start, arg.range.end] for arg in role.arguments}
return roles_dict
    '''Dependency parsing: maintain, for each word in the sentence, a dict of its dependency children'''
def build_parse_child_dict(self, words, postags, arcs):
child_dict_list = []
format_parse_list = []
for index in range(len(words)):
child_dict = dict()
for arc_index in range(len(arcs)):
                if arcs[arc_index].head == index+1:  # arc indices start from 1
if arcs[arc_index].relation in child_dict:
child_dict[arcs[arc_index].relation].append(arc_index)
else:
child_dict[arcs[arc_index].relation] = []
child_dict[arcs[arc_index].relation].append(arc_index)
child_dict_list.append(child_dict)
        rely_id = [arc.head for arc in arcs]  # dependency head (parent) ids
        relation = [arc.relation for arc in arcs]  # dependency relation labels
        heads = ['Root' if id == 0 else words[id - 1] for id in rely_id]  # head words ('Root' for the root node)
for i in range(len(words)):
# ['ATT', '李克强', 0, 'nh', '总理', 1, 'n']
a = [relation[i], words[i], i, postags[i], heads[i], rely_id[i]-1, postags[rely_id[i]-1]]
format_parse_list.append(a)
return child_dict_list, format_parse_list
    '''Main parsing entry point'''
def parser_main(self, sentence):
words = list(self.segmentor.segment(sentence))
postags = list(self.postagger.postag(words))
arcs = self.parser.parse(words, postags)
child_dict_list, format_parse_list = self.build_parse_child_dict(words, postags, arcs)
roles_dict = self.format_labelrole(words, postags)
return words, postags, child_dict_list, roles_dict, format_parse_list
if __name__ == '__main__':
parse = LtpParser()
sentence = '书房里有电脑、有音响。'#《离开》是由张宇谱曲,演唱。
words, postags, child_dict_list, roles_dict, format_parse_list = parse.parser_main(sentence)
print(words, len(words))
print(postags, len(postags))
print(child_dict_list, len(child_dict_list))
print(roles_dict)
print(format_parse_list, len(format_parse_list))
# for data in format_parse_list:
# if data[0]=='HED':
# print(data[1])
| 42.176471 | 116 | 0.614226 |
a8fd55aaa3ed3b16b9a4ac6ecf440894a3b0fc20
| 7,186 |
py
|
Python
|
skaffold-STABLE/webapp/hello.py
|
LennartFertig/BigData
|
e74761b16812fd034519c06897329ea9ba9968df
|
[
"Apache-2.0"
] | null | null | null |
skaffold-STABLE/webapp/hello.py
|
LennartFertig/BigData
|
e74761b16812fd034519c06897329ea9ba9968df
|
[
"Apache-2.0"
] | null | null | null |
skaffold-STABLE/webapp/hello.py
|
LennartFertig/BigData
|
e74761b16812fd034519c06897329ea9ba9968df
|
[
"Apache-2.0"
] | 1 |
2021-10-19T07:45:12.000Z
|
2021-10-19T07:45:12.000Z
|
from flask import Flask, render_template, request, redirect, url_for, flash
import emoji
import socket
import time  # needed by timed_producer() below
import psycopg2
from pymemcache.client.base import Client
from essential_generators import DocumentGenerator
from kafka import KafkaProducer
# Lennart, 26.8
# from flask_caching import Cache
client = Client('memcached-service')
app=Flask(__name__)
# Test access to the web server
@app.route('/')
def Index():
return render_template('index.html')
# Test the cache server, Lennart, 26.08.
# The connection to the database is already in place.
@app.route('/deployment')
def depl():
    ## Fetch data from the cache server
cache_result = client.get('flights')
    ## If there is no data in the cache, pull it from the database
    if cache_result is None: # flights not available
con = psycopg2.connect("host=postgres port=5432 dbname=kranichairline_db user=postgres password=postgres")
cur = con.cursor()
cur.execute("select * from flights")
data = cur.fetchall()
cur.close()
client.set('flights', data)
return render_template('index3.html', data=data)
else:
        #### TODO: the output format is not pretty yet
        # If available, take the data from the cache
data=cache_result
return render_template('index3.html', data=data)
# except Exception as e:
# data=e
# return emoji.emojize('Cacheserver ist :poop:', use_aliases=True)
# Function that sends data to the Kafka topic; called when the button is clicked
@app.route('/kafka')
def your_flask_function():
    # send on click
producer = KafkaProducer(bootstrap_servers='my-cluster-kafka-bootstrap:9092')
next_click = "KLICK GEHT"
# print(f"Sending message: {next_click}")
future = producer.send("1337datascience", next_click.encode())
result = future.get(timeout=5)
# print(f"Result: {result}")
return emoji.emojize(':thumbsup:', use_aliases=True)
###### Draft
### Alternatively, one could build a page that keeps sending messages to the topic for as long as the user stays on it,
# and use that to estimate user interest and raise the prices accordingly
@app.route('/zeitbasiert')
def timed_producer():
producer = KafkaProducer(bootstrap_servers='my-cluster-kafka-bootstrap:9092')
while True:
next_msg = "nochda"
print(f"Sending message: {next_msg}")
future = producer.send("1337datascience", next_msg.encode())
result = future.get(timeout=10)
print(f"Result: {result}")
time.sleep(5)
############### Everything from here on are test pages ################
# Test database access
@app.route('/cachetest')
def test():
    ## Fetch data from the cache server
cache_result = client.get('flights')
    ## If there is no data in the cache, pull it from the database
    if cache_result is None: # flights not available
con = psycopg2.connect("host=postgres port=5432 dbname=kranichairline_db user=postgres password=postgres")
cur = con.cursor()
cur.execute("select * from flights")
data = cur.fetchall()
cur.close()
client.set('flights', data)
return emoji.emojize('Daten waren nicht im Cacheserver :thumbsdown:', use_aliases=True)
else:
        # If available, take the data from the cache
data=cache_result
return emoji.emojize('Daten waren im Cacheserver :thumbsup:', use_aliases=True)
# except Exception as e:
# data=e
# return emoji.emojize('Cacheserver ist :poop:', use_aliases=True)
# Test database access
@app.route('/dbtest')
def dbtest():
con = psycopg2.connect("host=postgres port=5432 dbname=kranichairline_db user=postgres password=postgres")
cur = con.cursor()
cur.execute("select * from flights")
data = cur.fetchall()
cur.close()
return render_template('index3.html', data=data)
# Test whether the service is reachable via DNS - insert the current IP
# UPDATE 24.08.
# The DNS reachability problem was caused by "k delete --all --all-namespaces", which also deletes the DNS pod
@app.route('/servicetest')
def servicetest():
try:
con = psycopg2.connect("host=10.101.162.210 port=5432 dbname=kranichairline_db user=postgres password=postgres")
print('+=========================+')
print('| CONNECTED TO DATABASE |')
print('+=========================+')
# cursor = conn.cursor()
# print("test")
# print(cursor.execute("SELECT * FROM flights"))
cur = con.cursor()
cur.execute("select * from flights")
data = cur.fetchall()
cur.close()
return render_template('index3.html', data=data)
except Exception as e:
data=e
return emoji.emojize('Datenbank :poop:', use_aliases=True)
# Test whether the Postgres pod is reachable via its IP - insert the current IP
@app.route('/podtest')
def podtest():
try:
con = psycopg2.connect("host=172.17.0.5 port=5432 dbname=kranichairline_db user=postgres password=postgres")
print('+=========================+')
print('| CONNECTED TO DATABASE |')
print('+=========================+')
# cursor = conn.cursor()
# print("test")
# print(cursor.execute("SELECT * FROM flights"))
cur = con.cursor()
cur.execute("select * from flights")
data = cur.fetchall()
cur.close()
return render_template('index3.html', data=data)
except Exception as e:
data=e
return emoji.emojize('Datenbank :poop:', use_aliases=True)
# test whether the prices can be changed
@app.route('/changedb')
def changetest():
try:
con = psycopg2.connect("host=postgres port=5432 dbname=kranichairline_db user=postgres password=postgres")
cur = con.cursor()
cur.execute("UPDATE flights SET price= price + (price * 10 / 100) ")
cur.execute("select * from flights")
data = cur.fetchall()
cur.close()
return render_template('index3.html', data=data)
except Exception as e:
data=e
return emoji.emojize('Datenbank-Schreiben :poop:', use_aliases=True)
@app.route('/kafkaread')
# Test whether messages can be read
def kafkaread():
from kafka import KafkaConsumer
# The bootstrap server to connect to
bootstrap = 'my-cluster-kafka-kafka-bootstrap:9092'
    # Create a consumer instance
# cf.
print('Starting KafkaConsumer')
consumer = KafkaConsumer('1337datascience', # <-- topics
bootstrap_servers=bootstrap)
# Print out all received messages
data=[]
for msg in consumer:
data.append(msg)
return render_template('index3.html', data=data)
@app.route('/kafkaread2')
def kafkaread2():
from kafka import KafkaConsumer
# The bootstrap server to connect to
bootstrap = 'my-cluster-kafka-kafka-bootstrap:9092'
    # Create a consumer instance
# cf.
print('Starting KafkaConsumer')
consumer = KafkaConsumer('1337datascience', # <-- topics
bootstrap_servers=bootstrap)
# Print out all received messages
data=[]
for msg in consumer:
data.append(msg)
return data
| 34.883495 | 120 | 0.655998 |
0f2110fe2a5c2a18715a941c83b81ee45eb98923
| 320 |
py
|
Python
|
udacity course code/01-03-numpyarrayattributes.py
|
bluemurder/mlfl
|
b895b2f1d01b0f6418a5bcee2f204dd7916062f0
|
[
"MIT"
] | 1 |
2021-03-22T22:25:54.000Z
|
2021-03-22T22:25:54.000Z
|
udacity course code/01-03-numpyarrayattributes.py
|
bluemurder/mlfl
|
b895b2f1d01b0f6418a5bcee2f204dd7916062f0
|
[
"MIT"
] | 6 |
2017-01-16T09:53:21.000Z
|
2017-01-18T12:20:09.000Z
|
udacity course code/01-03-numpyarrayattributes.py
|
bluemurder/mlfl
|
b895b2f1d01b0f6418a5bcee2f204dd7916062f0
|
[
"MIT"
] | null | null | null |
"""Array attributes."""
import numpy as np
def test_run():
# Generate an array full of random numbers, uniformly distributed from [0.0, 1.0)
a = np.random.random((5, 4))
    print(a)
    print(a.shape)       # e.g. (5, 4)
    print(len(a.shape))  # number of dimensions
    print(a.size)        # total number of elements
    print(a.dtype)
if __name__ == "__main__":
test_run()
| 18.823529 | 85 | 0.61875 |
0f3d9e11e63c47d832dad04123d10ac7e6934e64
| 2,065 |
py
|
Python
|
private_sdk/signature.py
|
teambge/bge-private-sdk
|
b27d4a6caf35bcb89a260938260fd75dba173311
|
[
"MIT"
] | null | null | null |
private_sdk/signature.py
|
teambge/bge-private-sdk
|
b27d4a6caf35bcb89a260938260fd75dba173311
|
[
"MIT"
] | null | null | null |
private_sdk/signature.py
|
teambge/bge-private-sdk
|
b27d4a6caf35bcb89a260938260fd75dba173311
|
[
"MIT"
] | null | null | null |
#-*- coding: utf-8 -*-
from base64 import b64encode
from datetime import datetime, timezone, timedelta
from uuid import uuid4
try:
from urllib import quote, quote_plus
except ImportError:
from urllib.parse import quote, quote_plus
import string
import hmac
class Signature(object):
salt = string.ascii_letters
def __init__(self, client_secret, expiration_time=300):
self.client_secret = client_secret
self.expiration_time = expiration_time
def get_timestamp(self):
return datetime.now(tz=timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
def is_expired(self, timestamp):
now = datetime.now(tz=timezone.utc)
timestamp = datetime.strptime(
timestamp, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc)
return now > (timestamp + timedelta(seconds=self.expiration_time))
def get_sign_nonce(self):
return uuid4().hex
def _get_stringtosign(self, params):
t = []
items = list(params.items())
items.sort(key=lambda i: i[0])
for key, value in items:
if value is None:
continue
key = quote_plus(key)
value = quote_plus(str(value))
value = value.replace('%7E', '~').replace('+', '%20')
t.append('%s=%s' % (key, value))
qs = '&'.join(t)
qs = quote_plus(qs).replace('%7E', '~').replace('+', '%20')
return qs
def _make_signed_string(self, params):
text = self._get_stringtosign(params)
message = '&'.join([self.salt, text])
key = (self.client_secret + '&').encode('utf-8')
message = message.encode('utf-8')
h = hmac.new(key, message, digestmod='sha1')
return b64encode(h.digest()).decode('utf-8')
def sign(self, params):
return self._make_signed_string(params)
def verify(self, params, signed_string):
timestamp = params['timestamp']
if self.is_expired(timestamp):
return False
return self._make_signed_string(params) == signed_string
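# Hedged usage sketch (added, not part of the original module): shows one way the Signature class
# above could be exercised. The client secret and parameter values below are made up for illustration.
if __name__ == '__main__':
    signer = Signature('my-client-secret', expiration_time=300)
    params = {
        'client_id': 'demo-client',
        'timestamp': signer.get_timestamp(),
        'nonce': signer.get_sign_nonce(),
    }
    signed = signer.sign(params)
    print('signature:', signed)
    # verify() re-signs the parameters and also rejects expired timestamps
    print('verified:', signer.verify(params, signed))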
| 31.287879 | 75 | 0.614528 |
0e3290b35fe588287504328fa4f6b276bb7421d0
| 911 |
py
|
Python
|
jumeaux/addons/final/json.py
|
ihatov08/jumeaux
|
7d983474df4b6dcfa57ea1a66901fbc99ebababa
|
[
"MIT"
] | 11 |
2017-10-02T01:29:12.000Z
|
2022-03-31T08:37:22.000Z
|
jumeaux/addons/final/json.py
|
ihatov08/jumeaux
|
7d983474df4b6dcfa57ea1a66901fbc99ebababa
|
[
"MIT"
] | 79 |
2017-07-16T14:47:17.000Z
|
2022-03-31T08:49:14.000Z
|
jumeaux/addons/final/json.py
|
ihatov08/jumeaux
|
7d983474df4b6dcfa57ea1a66901fbc99ebababa
|
[
"MIT"
] | 2 |
2019-01-28T06:11:58.000Z
|
2021-01-25T07:21:21.000Z
|
# -*- coding:utf-8 -*-
from owlmixin import OwlMixin, TOption
from jumeaux.addons.final import FinalExecutor
from jumeaux.models import FinalAddOnPayload, FinalAddOnReference
from jumeaux.logger import Logger
logger: Logger = Logger(__name__)
class Config(OwlMixin):
sysout: bool = False
indent: TOption[int]
class Executor(FinalExecutor):
def __init__(self, config: dict):
self.config: Config = Config.from_dict(config or {})
def exec(self, payload: FinalAddOnPayload, reference: FinalAddOnReference) -> FinalAddOnPayload:
if self.config.sysout:
print(payload.report.to_json(indent=self.config.indent.get()))
else:
payload.report.to_jsonf(
f"{payload.result_path}/report.json",
encoding=payload.output_summary.encoding,
indent=self.config.indent.get(),
)
return payload
| 29.387097 | 100 | 0.675082 |
0e9efacad3b0ff51bec1249b715d4eda1c1e68af
| 5,962 |
py
|
Python
|
pvtool/routes/_measurement.py
|
schmocker/pv-FHNW
|
5066e0bc7ce76be5d1a930b50034c746b232a9f8
|
[
"MIT"
] | 1 |
2019-10-31T13:34:12.000Z
|
2019-10-31T13:34:12.000Z
|
pvtool/routes/_measurement.py
|
schmocker/pv-FHNW
|
5066e0bc7ce76be5d1a930b50034c746b232a9f8
|
[
"MIT"
] | 1 |
2019-05-27T13:03:25.000Z
|
2019-05-27T13:03:25.000Z
|
pvtool/routes/_measurement.py
|
schmocker/pv-FHNW
|
5066e0bc7ce76be5d1a930b50034c746b232a9f8
|
[
"MIT"
] | null | null | null |
"""Overview of all Measurements and linked functions such as uploading removing and single view of measurement"""
import os
from werkzeug.utils import secure_filename
from flask import Blueprint, render_template, request, redirect, flash, g, current_app, url_for
from flask_login import current_user, login_required
from ..db import db, Measurement, PvModule, MeasurementValues
from ..forms import MeasurementForm
from ..file_upload import UPLOAD_FOLDER, allowed_file, process_data_file, InvalidFileType,\
process_multiple_measurements_file
from ._users import add_timestamp, requires_access_level
measurement_routes = Blueprint('measurement', __name__, template_folder='templates')
@measurement_routes.route('/measurements')
def measurements():
"""Display all measurements as table with clickable individual measurements"""
measurements_for_displaying = db.session.query(Measurement).all()
return render_template('measurement/measurements.html', measurements=measurements_for_displaying)
@measurement_routes.route('/measurement')
def measurement():
"""Display a single measurement with link to removal, plot and returning to all measurements"""
try:
meas_id = request.args.get('id', type=int)
if meas_id is None:
raise Exception(f'no valid id for pv module')
meas = db.session.query(Measurement).get(meas_id)
meas_values = db.session.query(MeasurementValues).filter(MeasurementValues.measurement_id == meas_id).all()
print(meas_values)
if meas is None:
raise Exception(f'no measurement with id {meas_id} exists')
return render_template('measurement/measurement.html', measurement=meas, measurement_values=meas_values)
except Exception as e:
flash(str(e), category='danger')
return redirect('measurements')
@measurement_routes.route('/measurement/remove')
@requires_access_level('Admin')
def remove_measurement():
"""Remove the individual measurement and its corresponding measurement values, does not affect the user"""
meas_id = request.args.get('id', type=int)
if meas_id is not None:
db.session.query(Measurement).filter(Measurement.id == meas_id).delete()
db.session.commit()
return redirect('/measurements')
@measurement_routes.route('/add_measurement', methods=['GET', 'POST'])
@login_required
def add_measurement():
"""Form to add measurement with populated pvmodules field"""
form = MeasurementForm()
modules = db.session.query(PvModule).all()
current_user_data = current_user.__dict__
user = {'students': current_user_data['student1'] + ', ' +
current_user_data['student2'] + ', ' +
current_user_data['student3'],
'meas_series': current_user_data['user_name']}
form.pv_modul.choices = []
# Every user can only insert one measurement
if db.session.query(Measurement).filter(Measurement.measurement_series == user['meas_series']).first() is not None\
and user['meas_series'] != 'admin':
print(db.session.query(Measurement).filter(Measurement.measurement_series == user['meas_series']).first())
flash('Sie haben bereits eine Messung hinzugefügt.', category='danger')
return redirect(url_for('measurement.measurements'))
# populate select field with available distinct modules
for module in modules:
if (module.model, str(module.manufacturer) + ' ' + str(module.model)) not in form.pv_modul.choices:
form.pv_modul.choices.append((module.model, str(module.manufacturer) + ' ' + str(module.model)))
if request.method == 'POST':
chosen_module = db.session.query(PvModule).filter(PvModule.model == form.pv_modul.data).first()
# noinspection PyArgumentList
new_measurement = Measurement(date=form.mess_datum.data,
measurement_series=user['meas_series'],
producer=user['students'],
)
# save file that was uploaded
# if form.validate_on_submit():
f = form.messungen.data
filename = secure_filename(f.filename)
if not allowed_file(filename):
flash('Ungültiges Dateiformat.', category='danger')
return redirect(url_for('measurement.measurements'))
f.save(os.path.join(UPLOAD_FOLDER, filename))
chosen_module.measurements.append(new_measurement)
try:
process_data_file(filename, new_measurement)
except InvalidFileType:
flash('Messung hochladen fehlgeschlagen!', category='danger')
return redirect(url_for('measurement.measurements'))
db.session.add(chosen_module)
db.session.commit()
add_timestamp()
flash('Messung erfolgreich hinzugefügt.', category='success')
return redirect(url_for('measurement.measurements'))
# flash current user
flash('Angemeldet als:', )
flash(current_user_data['user_name'], category='primary')
return render_template('measurement/add_measurement.html', form=form, user=user)
@measurement_routes.route('/add_measurements', methods=['GET', 'POST'])
@requires_access_level('Admin')
def add_measurements():
"""Form to add measurement from excel, multiple measurements possible"""
form = MeasurementForm()
if request.method == 'POST':
f = form.messungen.data
filename = secure_filename(f.filename)
if not allowed_file(filename):
flash('Ungültiges Dateiformat.', category='danger')
return redirect(url_for('measurement.measurements'))
path_to_file = os.path.join(UPLOAD_FOLDER, filename)
f.save(path_to_file)
process_multiple_measurements_file(filename)
return redirect(url_for('measurement.measurements'))
return render_template('measurement/add_measurements.html', form=form)
| 43.202899 | 119 | 0.69423 |
383966bec51a678c93133bca8d324981bf23e90d
| 7,526 |
py
|
Python
|
sdd-db/cronjobs/db_upload_energy.py
|
socialdistancingdashboard/virushack
|
6ef69d26c5719d0bf257f4594ed2488dd73cdc40
|
[
"Apache-2.0"
] | 29 |
2020-03-21T00:47:51.000Z
|
2021-07-17T15:50:33.000Z
|
sdd-db/cronjobs/db_upload_energy.py
|
socialdistancingdashboard/virushack
|
6ef69d26c5719d0bf257f4594ed2488dd73cdc40
|
[
"Apache-2.0"
] | 7 |
2020-03-21T14:04:26.000Z
|
2022-03-02T08:05:40.000Z
|
sdd-db/cronjobs/db_upload_energy.py
|
socialdistancingdashboard/virushack
|
6ef69d26c5719d0bf257f4594ed2488dd73cdc40
|
[
"Apache-2.0"
] | 13 |
2020-03-21T01:08:08.000Z
|
2020-04-08T17:21:11.000Z
|
""" Uploads Corona data from Zeit online
Note: infected numbers are known infections on a particular day.
Dead and Recovered numbers were summed up until today. """
import os
import pandas as pd
from datetime import datetime, timedelta
import pytz
# compatibility with ipython
try:
__IPYTHON__
os.chdir(os.path.dirname(__file__))
except: pass
import json
import pymysql
from pymysql.constants import CLIENT
from sqlalchemy import create_engine
from sqlalchemy.pool import NullPool
import requests
from hashlib import md5
# connect to aws database with sqlalchemy (used for pandas connections)
config = json.load(open("../../credentials/credentials-aws-db.json", "r"))
aws_engine = create_engine(
("mysql+pymysql://" +
config["user"] + ":" +
config["password"] + "@" +
config["host"] + ":" +
str(config["port"]) + "/" +
config["database"]),
poolclass=NullPool, # dont maintain a pool of connections
pool_recycle=3600 # handles timeouts better, I think...
)
# aws database connection used for normal queries because sqlalchemy doesnt support on duplicate key queries
pymysql_con = pymysql.connect(
config["host"],
config["user"],
config["password"],
config["database"],
client_flag=CLIENT.MULTI_STATEMENTS)
charts_whitelist = [
"import balance",
"load",
"day ahead auction",
"intraday continuous average price",
"intraday continuous id3 price",
"intraday continuous id1 price"
]
description_lookup = {
"import balance": {
"desc_short": "Netto Stromimporte",
"desc_long": "Netto Stromimporte",
"unit": "Gigawatt",
"unit_agg": "Gigawatt",
"agg_mode": "sum"
},
"load": {
"desc_short": "Stromverbrauch",
"desc_long": "Stromverbrauch",
"unit": "Gigawatt",
"unit_agg": "Gigawatt",
"agg_mode": "sum"
},
"day ahead auction": {
"desc_short": "Day-ahead Strompreis",
"desc_long": "Day-ahead Strompreise",
"unit": "EUR/MWh",
"unit_agg": "Prozent",
"agg_mode": "avg-percentage-of-normal"
},
"intraday continuous average price": {
"desc_short": "Strompreis Index IDFull",
"desc_long": "The IDFull index is the weighted average price of all continuous trades executed during the full trading session of any EPEX SPOT continuous contract. This index includes the entire market liquidity and thus represents the obvious continuous market price references for each contract.",
"unit": "EUR/MWh",
"unit_agg": "Prozent",
"agg_mode": "avg-percentage-of-normal"
},
"intraday continuous id3 price": {
"desc_short": "Strompreis Index ID3",
"desc_long": "The ID3 index is the weighted average price of all continuous trades executed within the last 3 trading hours of a contract (up to 30min before delivery start).This index focuses on the most liquid timeframe of a continuous contract trading session. As such, this index presents large business interest for EPEX SPOT customers to market their offers or challenge their trading activity.",
"unit": "EUR/MWh",
"unit_agg": "Prozent",
"agg_mode": "avg-percentage-of-normal"
},
"intraday continuous id1 price": {
"desc_short": "Strompreis Index ID1",
"desc_long": "The ID1 index is the weighted average price of all continuous trades executed within the last trading hour of a contract up to 30min before delivery start. This index catches the market last minute imbalance needs, reflecting amongst other the increasing REN breakthrough and system balancing flexibility.",
"unit": "EUR/MWh",
"unit_agg": "Prozent",
"agg_mode": "avg-percentage-of-normal"
}
}
# retrieve data from fraunhofer ise
def upload_week(week):
url = f"https://www.energy-charts.de/price/week_2020_{week}.json"
r = requests.get(url)
data = r.json()
for chart in data:
chart_key = chart["key"][0]["en"].lower().replace("-", " ")
if not chart_key in charts_whitelist:
continue
print(f"current chart_key {chart_key}")
source_id = ("score fraunhofer " + chart_key).replace(" ", "_")
source = {
"id": source_id,
"desc_short": description_lookup[chart_key]["desc_short"],
"desc_long": description_lookup[chart_key]["desc_long"] ,
"contributors": "Fraunhofer ISI, 50 Hertz, Amprion, Tennet, TransnetBW, EEX, EPEX SPOT",
"unit": description_lookup[chart_key]["unit"],
"unit_long": description_lookup[chart_key]["unit"],
"unit_agg_long": description_lookup[chart_key]["unit_agg"],
"sample_interval": "hourly",
"agg_mode": description_lookup[chart_key]["agg_mode"],
"has_reference_values": 0,
"spatial_level": "country"
}
q = """
REPLACE INTO sources (
id, desc_short, desc_long, contributors, unit, unit_long, unit_agg_long,
sample_interval, agg_mode, has_reference_values, spatial_level )
VALUES ( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
"""
with pymysql_con.cursor() as cur:
cur.execute(q, list(source.values()))
pymysql_con.commit()
country_id = "DE"
unique_index = source_id + country_id
station = {
"source_id": source_id,
"description": "country-level data",
"source_station_id": "country-level data",
"country_id": country_id,
"unique_index": md5(unique_index.encode("utf-8")).hexdigest()
}
q = """
INSERT INTO stations ( source_id, description, source_station_id, country_id, unique_index )
VALUES ( %s, %s, %s, %s, %s )
ON DUPLICATE KEY UPDATE
source_id = VALUES(source_id),
description = VALUES(description),
source_station_id = VALUES(source_station_id),
country_id = VALUES(country_id)
"""
with pymysql_con.cursor() as cur:
cur.execute(q, list(station.values()))
pymysql_con.commit()
q = """
SELECT id AS station_id FROM stations
WHERE source_id = '%s'
""" % source_id
scores_stations_foreign_key = pd.read_sql(q, aws_engine)["station_id"].iloc[0]
# remove trailing zeros
drop_index = len(chart["values"])
while chart["values"][drop_index-1][1] == 0:
drop_index = drop_index - 1
df_scores = pd.DataFrame(chart["values"][:drop_index], columns=["dt", "score_value"])
df_scores.dropna(inplace=True)
df_scores.dt = df_scores.dt.apply(lambda x: datetime.fromtimestamp(x / 1000))
df_scores['dt'] = df_scores['dt'].astype(str)
df_scores["source_id"] = source_id
df_scores["station_id"] = scores_stations_foreign_key
q = """
INSERT INTO scores ( dt, score_value, source_id, station_id )
VALUES (%s, %s, %s, %s)
ON DUPLICATE KEY UPDATE
score_value = VALUES(score_value)
"""
with pymysql_con.cursor() as cur:
cur.executemany(q, df_scores[["dt", "score_value", "source_id", "station_id"]].values.tolist())
pymysql_con.commit()
print("uploaded week %s done" % week)
def upload_all():
""" Drop all fraunhofer data before reuploading """
q = """
DELETE FROM sources WHERE id LIKE '%fraunhofer%';
DELETE FROM stations WHERE source_id LIKE '%fraunhofer%';
DELETE FROM scores WHERE source_id LIKE '%fraunhofer%';
"""
with pymysql_con.cursor() as cur:
cur.execute(q)
pymysql_con.commit()
start = datetime(2020,1,1)
week = start.isocalendar()[1]
current_week = datetime.now().isocalendar()[1]
while week <= current_week:
upload_week(str(week).zfill(2))
week = week + 1
upload_all()
# upload for today
#current_week = datetime.now().isocalendar()[1]
#upload_week(str(week).zfill(2))
pymysql_con.close()
| 34.054299 | 406 | 0.680973 |
69afcb6111e2b727f9b4db4fba7fb9a04892dfe5
| 1,099 |
py
|
Python
|
time/plot.py
|
gray0018/Normal-integration-benchmark
|
3f4fff86e659ae2a3588c0960ebb0af39e4a1e21
|
[
"MIT"
] | null | null | null |
time/plot.py
|
gray0018/Normal-integration-benchmark
|
3f4fff86e659ae2a3588c0960ebb0af39e4a1e21
|
[
"MIT"
] | null | null | null |
time/plot.py
|
gray0018/Normal-integration-benchmark
|
3f4fff86e659ae2a3588c0960ebb0af39e4a1e21
|
[
"MIT"
] | null | null | null |
import numpy as np
import operator
import matplotlib.pyplot as plt
import json
import os
# directory = '.'
# d = {}
# for filename in os.listdir(directory):
# if filename.endswith(".npy"):
# t = np.load(filename)
# d[filename[:-4]] = float(t)
# j = json.dumps(d)
# f = open("woloop.json","w")
# f.write(j)
# f.close()
plt.style.use(['science','no-latex'])
with open('withloop.json') as json_file:
d_w = json.load(json_file)
d_w = dict(sorted(d_w.items(), key=operator.itemgetter(1)))
with open('woloop.json') as json_file:
d_wo = json.load(json_file)
d_wo = dict(sorted(d_wo.items(), key=operator.itemgetter(1)))
fig, axes = plt.subplots(figsize=(5,5))
axes.scatter(d_w.keys(), d_w.values())
axes.scatter(d_wo.keys(), d_wo.values())
axes.legend(['With nested loops','W/O nested loops'])
axes.set_ylabel('Time (s)', fontsize=16)
axes.set_xlabel('Model-resolution', fontsize=16)
chartBox = axes.get_position()
axes.set_position([chartBox.x0, chartBox.y0*2,
chartBox.width,
chartBox.height])
plt.xticks(rotation=90)
plt.show()
| 24.422222 | 61 | 0.658781 |
69ecdf48286aae4a1eb103b7ce4eaaa1dafeab2e
| 6,300 |
py
|
Python
|
src/onegov/fsi/views/course.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/fsi/views/course.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/fsi/views/course.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from onegov.core.html import html_to_text
from onegov.core.security import Private, Secret, Personal
from onegov.core.templates import render_template
from onegov.fsi import FsiApp
from onegov.fsi.collections.course import CourseCollection
from onegov.fsi.collections.course_event import CourseEventCollection
from onegov.fsi.forms.course import CourseForm, InviteCourseForm
from onegov.fsi import _
from onegov.fsi.layouts.course import CourseCollectionLayout, CourseLayout, \
AddCourseLayout, EditCourseLayout, InviteCourseLayout, \
CourseInviteMailLayout
from onegov.fsi.models import CourseAttendee
from onegov.fsi.models.course import Course
from onegov.fsi.models.course_notification_template import \
CourseInvitationTemplate, CourseNotificationTemplate
from onegov.user import User
def handle_send_invitation_email(
self, course, request, recipients, cc_to_sender=True):
"""Recipients must be a list of emails"""
if not recipients:
request.alert(_("There are no recipients matching the selection"))
else:
att = request.attendee
if cc_to_sender and att.id not in recipients:
recipients = list(recipients)
recipients.append(att.id)
mail_layout = CourseInviteMailLayout(course, request)
errors = []
for email in recipients:
attendee = request.session.query(
CourseAttendee).filter_by(_email=email).first()
if not attendee:
user = request.session.query(User).filter_by(
username=email).first()
if not user:
errors.append(email)
continue
attendee = user.attendee
if not attendee:
# This is a case that should not happen except in testing
errors.append(email)
continue
content = render_template('mail_notification.pt', request, {
'layout': mail_layout,
'title': self.subject,
'notification': self.text_html,
'attendee': attendee,
})
plaintext = html_to_text(content)
request.app.send_marketing_email(
receivers=(attendee.email,),
subject=self.subject,
content=content,
plaintext=plaintext,
)
request.success(_(
"Successfully sent the e-mail to ${count} recipients",
mapping={
'count': len(recipients) - len(errors)
}
))
if errors:
request.warning(
_('Following emails were unknown: ${mail_list}',
mapping={'mail_list': ', '.join(errors)})
)
return request
@FsiApp.html(
model=Course,
template='mail_notification.pt',
permission=Private,
name='embed')
def view_email_preview_for_course(self, request):
mail_layout = CourseInviteMailLayout(self, request)
template = CourseNotificationTemplate()
return {
'layout': mail_layout,
'title': template.subject,
'notification': template.text_html,
'attendee': request.attendee
}
@FsiApp.html(
model=CourseCollection,
template='courses.pt',
permission=Personal
)
def view_course_collection(self, request):
layout = CourseCollectionLayout(self, request)
layout.include_accordion()
return {
'layout': layout,
'model': self,
}
@FsiApp.form(
model=CourseCollection,
template='form.pt',
name='add',
form=CourseForm,
permission=Secret
)
def view_add_course_event(self, request, form):
layout = AddCourseLayout(self, request)
layout.include_editor()
if form.submitted(request):
course = self.add(**form.get_useful_data())
request.success(_("Added a new course"))
return request.redirect(request.link(course))
return {
'layout': layout,
'model': self,
'form': form
}
@FsiApp.html(
model=Course,
template='course.pt',
permission=Personal
)
def view_course_event(self, request):
layout = CourseLayout(self, request)
return {
'layout': layout,
'model': self,
'events': self.future_events.all()
}
@FsiApp.json(
model=Course,
permission=Personal,
name='content-json'
)
def get_course_event_content(self, request):
return self.description_html
@FsiApp.form(
model=Course,
template='form.pt',
name='edit',
form=CourseForm,
permission=Secret
)
def view_edit_course_event(self, request, form):
layout = EditCourseLayout(self, request)
layout.include_editor()
if form.submitted(request):
form.update_model(self)
request.success(_("Your changes were saved"))
return request.redirect(request.link(self))
if not form.errors:
form.apply_model(self)
return {
'layout': layout,
'model': self,
'form': form,
'button_text': _('Update')
}
@FsiApp.form(
model=Course,
template='course_invite.pt',
form=InviteCourseForm,
name='invite',
permission=Private
)
def invite_attendees_for_event(self, request, form):
layout = InviteCourseLayout(self, request)
if form.submitted(request):
recipients = form.get_useful_data()
request = handle_send_invitation_email(
CourseInvitationTemplate(),
self,
request,
recipients,
cc_to_sender=False
)
return request.redirect(request.link(self))
return {
'layout': layout,
'model': self,
'form': form,
'button_text': _('Send Invitation'),
'email': CourseInvitationTemplate(),
'iframe_link': request.link(self, name='embed')
}
@FsiApp.view(
model=Course,
request_method='DELETE',
permission=Secret
)
def delete_course(self, request):
request.assert_valid_csrf_token()
if not self.events.first():
CourseEventCollection(request.session).delete(self)
request.success(_('Course successfully deleted'))
else:
request.warning(_('This course has events and can not be deleted'))
| 27.038627 | 77 | 0.628254 |
38dce8febadffc6a8a290ffb214c1ae017cc58f2
| 4,673 |
py
|
Python
|
RDS/circle1_adapters_and_ports/port_openscienceframework/tests/server/example.py
|
Sciebo-RDS/Sciebo-RDS
|
d71cf449ed045a2a7a049e2cb77c99fd5a9195bd
|
[
"MIT"
] | 10 |
2020-06-24T08:22:24.000Z
|
2022-01-13T16:17:36.000Z
|
RDS/circle1_adapters_and_ports/port_openscienceframework/tests/server/example.py
|
Sciebo-RDS/Sciebo-RDS
|
d71cf449ed045a2a7a049e2cb77c99fd5a9195bd
|
[
"MIT"
] | 78 |
2020-01-23T14:32:06.000Z
|
2022-03-07T14:11:16.000Z
|
RDS/circle1_adapters_and_ports/port_openscienceframework/tests/server/example.py
|
Sciebo-RDS/Sciebo-RDS
|
d71cf449ed045a2a7a049e2cb77c99fd5a9195bd
|
[
"MIT"
] | 1 |
2020-06-24T08:33:48.000Z
|
2020-06-24T08:33:48.000Z
|
import json
# Use this to initialize a `Project` instance
node_json = """
{
"data": {
"relationships": {
"files": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/files/",
"meta": {}
}
}
},
"view_only_links": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/view_only_links/",
"meta": {}
}
}
},
"citation": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/citation/",
"meta": {}
}
}
},
"license": {
"links": {
"related": {
"href": "https://api.osf.io/v2/licenses/563c1ffbda3e240129e72c03/",
"meta": {}
}
}
},
"contributors": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/contributors/",
"meta": {}
}
}
},
"forks": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/forks/",
"meta": {}
}
}
},
"root": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/",
"meta": {}
}
}
},
"identifiers": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/identifiers/",
"meta": {}
}
}
},
"comments": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/comments/?filter%5Btarget%5D=f3szh",
"meta": {}
}
}
},
"registrations": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/registrations/",
"meta": {}
}
}
},
"logs": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/logs/",
"meta": {}
}
}
},
"node_links": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/node_links/",
"meta": {}
}
}
},
"linked_nodes": {
"links": {
"self": {
"href": "https://api.osf.io/v2/nodes/f3szh/relationships/linked_nodes/",
"meta": {}
},
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/linked_nodes/",
"meta": {}
}
}
},
"wikis": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/wikis/",
"meta": {}
}
}
},
"affiliated_institutions": {
"links": {
"self": {
"href": "https://api.osf.io/v2/nodes/f3szh/relationships/institutions/",
"meta": {}
},
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/institutions/",
"meta": {}
}
}
},
"children": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/children/",
"meta": {}
}
}
},
"preprints": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/preprints/",
"meta": {}
}
}
},
"draft_registrations": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/f3szh/draft_registrations/",
"meta": {}
}
}
}
},
"links": {
"self": "https://api.osf.io/v2/nodes/f3szh/",
"html": "https://osf.io/f3szh/"
},
"attributes": {
"category": "project",
"fork": false,
"preprint": true,
"description": "this is a test for preprint citations",
"current_user_permissions": [
"read"
],
"date_modified": "2017-03-17T16:11:35.721000",
"title": "Preprint Citations Test",
"collection": false,
"registration": false,
"date_created": "2017-03-17T16:09:14.864000",
"current_user_can_comment": false,
"node_license": {
"copyright_holders": [],
"year": "2017"
},
"public": true,
"tags": [
"qatest"
]
},
"type": "nodes",
"id": "f3szh"
}
}
"""
def _build_node(type_):
node = json.loads(node_json)
node["data"]["type"] = type_
return node
| 23.482412 | 91 | 0.395035 |
c7fa2d7b6279fbc16ddf225ebbef4fbcd6439d6d
| 2,204 |
py
|
Python
|
Grundlagen/Python/Minensucher/minensucher.py
|
jneug/schule-projekte
|
4f1d56d6bb74a47ca019cf96d2d6cc89779803c9
|
[
"MIT"
] | 2 |
2020-09-24T12:11:16.000Z
|
2022-03-31T04:47:24.000Z
|
Grundlagen/Python/Minensucher/minensucher.py
|
jneug/schule-projekte
|
4f1d56d6bb74a47ca019cf96d2d6cc89779803c9
|
[
"MIT"
] | 1 |
2021-02-27T15:06:27.000Z
|
2021-03-01T16:32:48.000Z
|
Grundlagen/Python/Minensucher/minensucher.py
|
jneug/schule-projekte
|
4f1d56d6bb74a47ca019cf96d2d6cc89779803c9
|
[
"MIT"
] | 1 |
2021-02-24T05:12:35.000Z
|
2021-02-24T05:12:35.000Z
|
from random import randint
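# Note (added): this script relies on the Pygame Zero runtime - names such as Actor, screen and
# mouse are injected by pgzero, and the script is presumably started with `pgzrun` rather than
# plain `python`.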
FELD_BREITE = 15
FELD_HOEHE = 10
ANZAHL_MINEN = randint(
int(FELD_BREITE * FELD_HOEHE * 0.1), int(FELD_BREITE * FELD_HOEHE * 0.2)
)
WIDTH = FELD_BREITE * 20
HEIGHT = FELD_HOEHE * 20
feld = []
def minen_verteilen(anzahl):
for i in range(FELD_BREITE):
feld.append([])
for j in range(FELD_HOEHE):
if anzahl > 0 and randint(0, 10) < 3:
feld[i].append("X")
anzahl -= 1
else:
feld[i].append(0)
def anzahl_anpassen(i, j):
for x in range(3):
for y in range(3):
new_i = i - 1 + x
new_j = j - 1 + y
if new_i >= 0 and new_i < FELD_BREITE and new_j >= 0 and new_j < FELD_HOEHE:
if feld[new_i][new_j] != "X":
feld[new_i][new_j] += 1
def minen_zaehlen():
for i in range(FELD_BREITE):
for j in range(FELD_HOEHE):
cell = feld[i][j]
if cell == "X":
anzahl_anpassen(i, j)
sprites = []
def feld_aufbauen():
for i in range(FELD_BREITE):
for j in range(FELD_HOEHE):
inhalt = feld[i][j]
if inhalt == "X":
bomb_sprite = Actor("bomb")
bomb_sprite.center = (i * 20 + 10, j * 20 + 10)
sprites.append(bomb_sprite)
feld_sprite = Actor("feld")
feld_sprite.topleft = (i * 20, j * 20)
sprites.append(feld_sprite)
minen_verteilen(ANZAHL_MINEN)
minen_zaehlen()
feld_aufbauen()
def draw():
screen.clear()
for i in range(FELD_BREITE):
for j in range(FELD_HOEHE):
inhalt = feld[i][j]
screen.draw.textbox(str(inhalt), Rect((i*20,j*20), (20,20)))
for sprite in sprites:
sprite.draw()
def on_mouse_down(pos, button):
if button == mouse.LEFT:
for sprite in sprites:
if sprite.collidepoint(pos):
sprites.remove(sprite)
i, j = int(pos[0] / 20), int(pos[1] / 20)
if feld[i][j] == 'X':
print("Bombe!")
else:
print(feld[i][j])
| 25.929412 | 89 | 0.503176 |
2d83515d3b2c0545f64c14bb473a19cac246deff
| 168 |
py
|
Python
|
nz_django/day1/urls_include_demo/book/urls.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | null | null | null |
nz_django/day1/urls_include_demo/book/urls.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | 27 |
2020-02-12T07:55:58.000Z
|
2022-03-12T00:19:09.000Z
|
nz_django/day1/urls_include_demo/book/urls.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | 2 |
2020-02-18T01:54:55.000Z
|
2020-02-21T11:36:28.000Z
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index),
path('detail/<int:book_id>/', views.book_detail,{'name':'kangbazi'}),
]
| 24 | 73 | 0.666667 |
933cb13bc7fe5bd1b62885cb8b25ce8a810ed468
| 2,082 |
py
|
Python
|
Packs/CrowdStrikeFalcon/Scripts/ReadNetstatFile/ReadNetstatFile.py
|
jrauen/content
|
81a92be1cbb053a5f26a6f325eff3afc0ca840e0
|
[
"MIT"
] | null | null | null |
Packs/CrowdStrikeFalcon/Scripts/ReadNetstatFile/ReadNetstatFile.py
|
jrauen/content
|
81a92be1cbb053a5f26a6f325eff3afc0ca840e0
|
[
"MIT"
] | 40 |
2022-03-03T07:34:00.000Z
|
2022-03-31T07:38:35.000Z
|
Packs/CrowdStrikeFalcon/Scripts/ReadNetstatFile/ReadNetstatFile.py
|
jrauen/content
|
81a92be1cbb053a5f26a6f325eff3afc0ca840e0
|
[
"MIT"
] | null | null | null |
from CommonServerPython import *
COMMAND_NAME = 'netstat'
def get_netstat_file_name(command_files):
if command_files and isinstance(command_files, dict):
netstat_files = command_files.get(COMMAND_NAME, [])
if netstat_files:
if isinstance(netstat_files, list):
# we want to get the last file name
return netstat_files[len(netstat_files) - 1].get('Filename')
elif isinstance(netstat_files, dict):
return netstat_files.get('Filename') # type:ignore
def get_file_name_from_context() -> str:
crowdstrike_context = demisto.context().get('CrowdStrike', {})
all_command_files = []
if isinstance(crowdstrike_context, list):
for ctx in crowdstrike_context:
if cmd_ctx := ctx.get('Command'):
all_command_files.append(cmd_ctx)
elif isinstance(crowdstrike_context, dict) and (cmd_ctx := crowdstrike_context.get('Command')):
all_command_files.append(cmd_ctx)
for command_file in all_command_files[::-1]: # get last file in context
if file_name := get_netstat_file_name(command_file):
return file_name
return ""
def get_file_entry_id(file_name):
file_entry_id = ""
if file_name:
entries = demisto.executeCommand('getEntries', {})
for entry in entries:
file_entry = demisto.get(entry, 'File')
is_correct_file = file_name.lower() == file_entry.lower()
if is_correct_file:
file_entry_id = entry['ID']
break
return file_entry_id
def get_file_content(file_entry_id):
if file_entry_id:
res = execute_command('getFilePath', {'id': file_entry_id})
file_path = res.get('path')
with open(file_path, 'r') as f:
file_content = f.read()
return file_content
def main():
file_name = get_file_name_from_context()
if file_name:
demisto.results(get_file_content(get_file_entry_id(file_name)))
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| 33.580645 | 99 | 0.650336 |
35c47dafeebb19ee587dafd46dfb9444757360e8
| 352 |
py
|
Python
|
Packs/CommonScripts/Scripts/DumpJSON/DumpJSON.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799 |
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/CommonScripts/Scripts/DumpJSON/DumpJSON.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317 |
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/CommonScripts/Scripts/DumpJSON/DumpJSON.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297 |
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
import json
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
def main():
key = demisto.args()['key']
obj_str = json.dumps(demisto.get(demisto.context(), key))
demisto.setContext('JsonStr', obj_str)
return_results(obj_str)
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| 22 | 61 | 0.6875 |
35d1681b32652ae64d777846da7fc45306f656ec
| 476 |
py
|
Python
|
___Python/Angela/PyKurs/p07_file_io/m01_count_files.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
___Python/Angela/PyKurs/p07_file_io/m01_count_files.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
___Python/Angela/PyKurs/p07_file_io/m01_count_files.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
from pathlib import Path
# Count the number of folders inside a folder (including all subfolders)
def count_dirs(path):
    subdirs = [subdir for subdir in path.iterdir() if subdir.is_dir()] # determine the direct subfolders of path
    count = 1 # the folder itself (Spielwiese)
    for subdir in subdirs:
        count += count_dirs(subdir) # for each individual child
return count
count = count_dirs(Path(r"O:\Spielwiese"))
print(count)
# Iterative solution (see the sketch below)
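# A minimal iterative sketch (added, not part of the original exercise): counts the same thing as
# count_dirs() above, but with an explicit stack instead of recursion. Path and the folder name are
# reused from above; everything else is illustrative.
def count_dirs_iterative(path):
    stack = [path]
    count = 0
    while stack:
        current = stack.pop()
        count += 1  # count the current directory itself
        # push all direct subdirectories onto the stack
        stack.extend(child for child in current.iterdir() if child.is_dir())
    return count
# print(count_dirs_iterative(Path(r"O:\Spielwiese")))  # would mirror the recursive result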
| 29.75 | 123 | 0.701681 |
ea420c4d8691173ecb3f646afb940711d48ce24a
| 484 |
py
|
Python
|
experiment/ea/make_tweet_tokenized_data_master.py
|
iecasszyjy/tweet_search-master
|
e4978521a39964c22ae46bf35d6ff17710e8e6c6
|
[
"MIT"
] | null | null | null |
experiment/ea/make_tweet_tokenized_data_master.py
|
iecasszyjy/tweet_search-master
|
e4978521a39964c22ae46bf35d6ff17710e8e6c6
|
[
"MIT"
] | 2 |
2021-03-31T18:54:16.000Z
|
2021-12-13T19:49:08.000Z
|
experiment/ea/make_tweet_tokenized_data_master.py
|
iecasszyjy/tweet_search-master
|
e4978521a39964c22ae46bf35d6ff17710e8e6c6
|
[
"MIT"
] | null | null | null |
from tqdm import tqdm
import pymongo
import redis
import json
from bson import json_util
client = pymongo.MongoClient('localhost:27017')
db = client.tweet
r = redis.StrictRedis(host='localhost', port=6379, db=0)
events = [e for e in db.current_event.find({},{'_id':1})]
def send_message(eid):
message = {'event_id':json.dumps(eid,default=json_util.default)}
r.rpush('task:data',json.dumps(message))
if __name__ == '__main__':
[send_message(e['_id']) for e in tqdm(events)]
| 25.473684 | 65 | 0.725207 |
57741b8c449b1a45735b70561ed716e153f13b4e
| 943 |
py
|
Python
|
venv/Lib/site-packages/pynance/tst/unit/data/test_compare.py
|
LeonardoHMS/imobi
|
6b2b97a05df67ea7d493f7b601382f65c6629cc2
|
[
"MIT"
] | 35 |
2015-03-12T04:16:14.000Z
|
2020-12-17T18:10:15.000Z
|
venv/Lib/site-packages/pynance/tst/unit/data/test_compare.py
|
LeonardoHMS/imobi
|
6b2b97a05df67ea7d493f7b601382f65c6629cc2
|
[
"MIT"
] | 31 |
2015-03-16T21:31:04.000Z
|
2021-01-26T00:12:34.000Z
|
venv/Lib/site-packages/pynance/tst/unit/data/test_compare.py
|
LeonardoHMS/imobi
|
6b2b97a05df67ea7d493f7b601382f65c6629cc2
|
[
"MIT"
] | 18 |
2015-09-30T10:40:26.000Z
|
2021-01-25T21:20:44.000Z
|
"""
Tests for performance comparison functions.
Copyright (c) 2016 Marshall Farrier
license http://opensource.org/licenses/MIT
"""
import unittest
import numpy as np
import pandas as pd
import pynance as pn
class TestCompare(unittest.TestCase):
def test_compare(self):
rng = pd.date_range('2016-03-28', periods=4)
eqs = ('SCTY', 'SPWR')
eq_dfs = [pd.DataFrame(index=rng, columns=['Close']) for i in range(len(eqs))]
eq_dfs[0].iloc[:, 0] = [2., 4., 6., 8.]
eq_dfs[1].iloc[:, 0] = [4., 4., 2., 6.]
rel_perf = pn.data.compare(eq_dfs, eqs)
self.assertTrue((rng == rel_perf.index).all(), 'incorrect index')
self.assertTrue((list(eqs) == list(rel_perf)), 'incorrect column labels')
self.assertTrue(np.allclose(np.array([[1., 2., 3., 4.], [1., 1., .5, 1.5]]).T, rel_perf.to_numpy()),
'incorrect values')
if __name__ == '__main__':
unittest.main()
| 30.419355 | 108 | 0.610817 |
57de2bd1b9fbd5865d36e8b81853868fd4548e1b
| 620 |
py
|
Python
|
comp/yelp/onsite2/MinCoinNum_swirl_streaming.py
|
cc13ny/all-in
|
bc0b01e44e121ea68724da16f25f7e24386c53de
|
[
"MIT"
] | 1 |
2015-12-16T04:01:03.000Z
|
2015-12-16T04:01:03.000Z
|
comp/yelp/onsite2/MinCoinNum_swirl_streaming.py
|
cc13ny/all-in
|
bc0b01e44e121ea68724da16f25f7e24386c53de
|
[
"MIT"
] | 1 |
2016-02-09T06:00:07.000Z
|
2016-02-09T07:20:13.000Z
|
comp/yelp/onsite2/MinCoinNum_swirl_streaming.py
|
cc13ny/all-in
|
bc0b01e44e121ea68724da16f25f7e24386c53de
|
[
"MIT"
] | 2 |
2019-06-27T09:07:26.000Z
|
2019-07-01T04:40:13.000Z
|
# Think about other ways
def mincoin(coins, target):
# coins: positive nums
coins.sort()
cand = set(coins)
minnums = [0] * target
cnt = 0
while len(cand) != 0:
cnt += 1
newcand = set()
for c in cand:
minnums[c] = cnt
for coin in coins:
idx = c + coin
if idx < target:
                    print(2)  # leftover debug output, made valid Python 3
if minnums[idx] == 0:
newcand.add(idx)
else:
break
cand = newcand
return cnt
print(mincoin([1, 2], 3))
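# A hedged alternative sketch (added): the classic bottom-up DP for minimum coin change, as a
# contrast to the BFS-style sweep above. It is illustrative only and not the original author's
# solution.
def mincoin_dp(coins, target):
    INF = float('inf')
    best = [0] + [INF] * target          # best[s] = fewest coins summing to s
    for s in range(1, target + 1):
        for coin in coins:
            if coin <= s and best[s - coin] + 1 < best[s]:
                best[s] = best[s - coin] + 1
    return best[target] if best[target] != INF else -1
# print(mincoin_dp([1, 2], 3))  # -> 2 (1 + 2)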
| 20.666667 | 41 | 0.41129 |
17b32a03eacb3b7786a65a8d9678832d9e175f53
| 857 |
py
|
Python
|
tools/pythonpkg/tests/fast/arrow/test_multiple_reads.py
|
AldoMyrtaj/duckdb
|
3aa4978a2ceab8df25e4b20c388bcd7629de73ed
|
[
"MIT"
] | 2,816 |
2018-06-26T18:52:52.000Z
|
2021-04-06T10:39:15.000Z
|
tools/pythonpkg/tests/fast/arrow/test_multiple_reads.py
|
AldoMyrtaj/duckdb
|
3aa4978a2ceab8df25e4b20c388bcd7629de73ed
|
[
"MIT"
] | 1,310 |
2021-04-06T16:04:52.000Z
|
2022-03-31T13:52:53.000Z
|
tools/pythonpkg/tests/fast/arrow/test_multiple_reads.py
|
AldoMyrtaj/duckdb
|
3aa4978a2ceab8df25e4b20c388bcd7629de73ed
|
[
"MIT"
] | 270 |
2021-04-09T06:18:28.000Z
|
2022-03-31T11:55:37.000Z
|
import duckdb
import os
try:
import pyarrow
import pyarrow.parquet
can_run = True
except:
can_run = False
class TestArrowReads(object):
def test_multiple_queries_same_relation(self, duckdb_cursor):
if not can_run:
return
parquet_filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),'data','userdata1.parquet')
cols = 'id, first_name, last_name, email, gender, ip_address, cc, country, birthdate, salary, title, comments'
userdata_parquet_table = pyarrow.parquet.read_table(parquet_filename)
userdata_parquet_table.validate(full=True)
rel = duckdb.from_arrow_table(userdata_parquet_table)
assert(rel.aggregate("(avg(salary))::INT").execute().fetchone()[0] == 149005)
assert(rel.aggregate("(avg(salary))::INT").execute().fetchone()[0] == 149005)
| 38.954545 | 118 | 0.697783 |
aa0112abd3623d59b95270b5dbc850092d2947d8
| 101 |
py
|
Python
|
python_lessons/python_test/second_python.py
|
1986MMartin/coding-sections-markus
|
e13be32e5d83e69250ecfb3c76a04ee48a320607
|
[
"Apache-2.0"
] | null | null | null |
python_lessons/python_test/second_python.py
|
1986MMartin/coding-sections-markus
|
e13be32e5d83e69250ecfb3c76a04ee48a320607
|
[
"Apache-2.0"
] | null | null | null |
python_lessons/python_test/second_python.py
|
1986MMartin/coding-sections-markus
|
e13be32e5d83e69250ecfb3c76a04ee48a320607
|
[
"Apache-2.0"
] | null | null | null |
# second python test
# written with GNU nano on a MacBook Pro - MMartin
n = 10
for i in range(n):
print(i)
| 12.625 | 40 | 0.683168 |
aa293795336e1733f07e56c3faa8c985e7a7dfa2
| 1,111 |
py
|
Python
|
src/python3_learn_video/BIF_closure.py
|
HuangHuaBingZiGe/GitHub-Demo
|
f3710f73b0828ef500343932d46c61d3b1e04ba9
|
[
"Apache-2.0"
] | null | null | null |
src/python3_learn_video/BIF_closure.py
|
HuangHuaBingZiGe/GitHub-Demo
|
f3710f73b0828ef500343932d46c61d3b1e04ba9
|
[
"Apache-2.0"
] | null | null | null |
src/python3_learn_video/BIF_closure.py
|
HuangHuaBingZiGe/GitHub-Demo
|
f3710f73b0828ef500343932d46c61d3b1e04ba9
|
[
"Apache-2.0"
] | null | null | null |
# the global keyword
print('-----------------------------------')
count = 5
def MyFun():
count = 10
print(count)
MyFun()
print(count)
print('-----------------------------------')
def MyFun():
global count
count = 10
print(count)
MyFun()
print(count)
print('-----------------------------------')
def fun1():
print('fun1()正在被调用...')
def fun2():
print('func2()正在被调用...')
fun2()
fun1()
print('-----------------------------------')
def FunX(x):
def FunY(y):
return x * y
return FunY
i = FunX(8)
print(i)
print(type(i))
print('-----------------------------------')
print(i(5))
print('-----------------------------------')
print(FunX(8)(5))
print('-----------------------------------')
def Fun1():
x = [5]
def Fun2():
x[0] *= x[0]
return x[0]
return Fun2()
print(Fun1())
print('-----------------------------------')
def Fun1():
x = 5
def Fun2():
nonlocal x
x *= x
return x
return Fun2()
print(Fun1())
print('-----------------------------------')
| 13.070588 | 44 | 0.338434 |
a4b16b9a02b8e34f41e02a71006951401e22f714
| 1,009 |
py
|
Python
|
Packs/PaloAltoNetworks_IoT3rdParty/Scripts/GeneratePANWIoTDeviceTableQueryForServiceNow/GeneratePANWIoTDeviceTableQueryForServiceNow.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799 |
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/PaloAltoNetworks_IoT3rdParty/Scripts/GeneratePANWIoTDeviceTableQueryForServiceNow/GeneratePANWIoTDeviceTableQueryForServiceNow.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317 |
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/PaloAltoNetworks_IoT3rdParty/Scripts/GeneratePANWIoTDeviceTableQueryForServiceNow/GeneratePANWIoTDeviceTableQueryForServiceNow.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297 |
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
def main():
device_list = demisto.args().get('devices')
query_strs = []
query_str = 'mac_addressIN'
DEFAULT_VALUE_SIZE = 100 # each query contains 100 deviceid
res = {}
output_description = f'Total data length is {len(device_list)}'
for i, entry in enumerate(device_list):
query_str += entry['deviceid'] + ','
if ((i + 1) % DEFAULT_VALUE_SIZE == 0 or i == (len(device_list) - 1)):
query_strs.append(query_str[0:len(query_str) - 1])
query_str = 'mac_addressIN'
res['query'] = query_strs
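    # Illustrative note (added): each entry in query_strs is a ServiceNow-style "IN" filter, e.g.
    # 'mac_addressINaa:bb:cc:00:00:01,aa:bb:cc:00:00:02' - the MAC addresses here are made up.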
output_description = f'{output_description} total number of query is {len(query_strs)}'
results = CommandResults(
readable_output=output_description,
outputs_prefix="PanwIot3rdParty.Query",
outputs=res
)
return results
if __name__ in ['__main__', 'builtin', 'builtins']:
res = main()
return_results(res)
| 30.575758 | 91 | 0.650149 |
1035e02df02cb8357fc290dc2aba63b6c1ba4281
| 1,640 |
py
|
Python
|
backend/utils/id_generator.py
|
methodpark/digitaleswarten
|
024c0b88df54e9727925b202e139b3c5b2ce73d6
|
[
"Apache-2.0"
] | 10 |
2020-03-20T19:14:43.000Z
|
2020-10-29T21:31:40.000Z
|
backend/utils/id_generator.py
|
methodpark/digitaleswarten
|
024c0b88df54e9727925b202e139b3c5b2ce73d6
|
[
"Apache-2.0"
] | 41 |
2020-03-20T20:27:55.000Z
|
2020-03-24T21:49:37.000Z
|
backend/utils/id_generator.py
|
methodpark/digitaleswarten
|
024c0b88df54e9727925b202e139b3c5b2ce73d6
|
[
"Apache-2.0"
] | 1 |
2020-03-21T09:31:51.000Z
|
2020-03-21T09:31:51.000Z
|
import random
import time
from hashlib import sha1
random.seed()
WORDLIST = {
'adjective': [
'angenehm', 'attraktiv', 'aufmerksam', 'bunt', 'blau', 'charmant',
'dankbar', 'edel', 'frei', 'gelb', 'glatt', 'hell', 'ideal', 'jung',
'leicht', 'lieb', 'luftig', 'mutig', 'nah', 'neu', 'offen', 'poetisch',
'rein', 'rund', 'sicher', 'treu', 'wach', 'warm', 'weich', 'zart',
'zentral', 'zivil'
],
'noun': [
'amulett', 'arm', 'ball', 'baum', 'dach', 'eimer', 'engel', 'film',
'foto', 'freiheit', 'haus', 'insel', 'kugel', 'liebe', 'mutter',
'maus', 'nase', 'natur', 'obst', 'orgel', 'papier', 'quelle', 'radio',
'ritter', 'sand', 'stein', 'uhr', 'vater', 'vogel', 'wasser', 'zahn'
],
'verb': [
'atmen', 'baden', 'bilden', 'danken', 'deuten', 'essen', 'haben',
'heilen', 'hoffen', 'jubeln', 'kreisen', 'lachen', 'leben', 'leuchten',
'loben', 'lohnen', 'malen', 'mischen', 'ordnen', 'planen', 'pfeifen',
'reden', 'rollen', 'sehen', 'stehen', 'teilen', 'trinken', 'wollen',
'zelten'
]
}
def generate_place_id():
"""
Returns:
- String: Human-readable id phrase
"""
return random.choice(WORDLIST['adjective']) + \
random.choice(WORDLIST['noun']) + \
random.choice(WORDLIST['verb'])
def generate_queue_id(queue_name):
hasher = sha1()
hasher.update(queue_name.encode('utf-8'))
name_hash = hasher.hexdigest()[:4]
time_stamp = str(int(time.time()))[-2:]
return name_hash + time_stamp
def generate_entry_id(name):
return generate_queue_id(name)
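# Hedged usage sketch (added, not part of the original module): demonstrates the expected shapes of
# the generated ids. Outputs are random/time-dependent, and the argument names are made up, so the
# values in the comments are only examples.
if __name__ == '__main__':
    print(generate_place_id())         # e.g. 'blauinselhoffen' (adjective + noun + verb)
    print(generate_queue_id('Kasse'))  # 4 hex chars of sha1('Kasse') + last 2 digits of the timestamp
    print(generate_entry_id('Max'))    # same scheme as generate_queue_id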
| 33.469388 | 79 | 0.54939 |
f4e559ef07486c1df08e4e0937f04c329444508d
| 2,126 |
py
|
Python
|
books/PythonCleanCode/ch6_descriptors/descriptors_methods_4.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/PythonCleanCode/ch6_descriptors/descriptors_methods_4.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/PythonCleanCode/ch6_descriptors/descriptors_methods_4.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
"""Clean Code in Python - Chapter 6: Descriptors
> Methods of the descriptor interface: __set_name__
"""
from log import logger
class DescriptorWithName:
"""This descriptor requires the name to be explicitly set."""
def __init__(self, name):
self.name = name
def __get__(self, instance, owner):
if instance is None:
return self
logger.info("getting %r attribute from %r", self.name, instance)
return instance.__dict__[self.name]
def __set__(self, instance, value):
instance.__dict__[self.name] = value
class ClientClass:
"""
>>> client = ClientClass()
>>> client.descriptor = "value"
>>> client.descriptor
'value'
>>> ClientClass.descriptor_2.name
"a name that doesn't match the attribute"
"""
descriptor = DescriptorWithName("descriptor")
descriptor_2 = DescriptorWithName("a name that doesn't match the attribute")
class DescriptorWithAutomaticName(DescriptorWithName):
"""This descriptor can infer the name of the attribute, if not provided.
It also supports setting a different name explicitly.
"""
def __init__(self, name: str = None) -> None:
self.name = name
def __set_name__(self, owner, name):
self.name = self.name or name
class NewClientClass:
"""
>>> NewClientClass.descriptor_with_default_name.name
'descriptor_with_default_name'
>>> NewClientClass.named_descriptor.name
'named_descriptor'
>>> NewClientClass.descriptor_named_differently.name
'a_different_name'
>>> client = NewClientClass()
>>> client.descriptor_named_differently = "foo"
>>> client.__dict__["a_different_name"]
'foo'
>>> client.descriptor_named_differently
'foo'
>>> client.a_different_name
'foo'
"""
descriptor_with_default_name = DescriptorWithAutomaticName()
named_descriptor = DescriptorWithAutomaticName("named_descriptor")
descriptor_named_differently = DescriptorWithAutomaticName(
"a_different_name"
)
| 26.575 | 81 | 0.660395 |
df4ccadd48ef65ce73e0ea13f4e72f45bf8f773d
| 381 |
py
|
Python
|
INBa/2015/RotkinAM/Zadacha_2_21.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
INBa/2015/RotkinAM/Zadacha_2_21.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
INBa/2015/RotkinAM/Zadacha_2_21.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
# Task 2. Variant 21.
# Write a program that prints your favourite saying by Leonardo da Vinci. Do not forget that the author must be mentioned on a separate line.
# Rotkin A.M.
# 02.04.2016
print ('За сладкое приходтся горько расплачиваться')
print ("\nЛеонардо да Винчи")
input ("Нажмите Enter для выхода")
| 42.333333 | 210 | 0.779528 |
df762fc47e8e0a8f06d11c71436c63aab26e2183
| 398 |
py
|
Python
|
api/app.py
|
singhprincejeet/in_poster
|
1b0e18631ebede94e679eb0aba6c8e7630a02aba
|
[
"MIT"
] | null | null | null |
api/app.py
|
singhprincejeet/in_poster
|
1b0e18631ebede94e679eb0aba6c8e7630a02aba
|
[
"MIT"
] | 4 |
2021-04-30T21:09:19.000Z
|
2022-03-12T00:19:12.000Z
|
api/app.py
|
singhprincejeet/in_poster
|
1b0e18631ebede94e679eb0aba6c8e7630a02aba
|
[
"MIT"
] | null | null | null |
from flask import Flask, request, send_file
from main_controller import MainController
app = Flask(__name__)
@app.route('/generate', methods=['POST'])
def generate():
image_src = MainController().generate_image(request)
return send_file(image_src)
@app.route('/ping', methods=['GET'])
def ping():
return 'pong'
if __name__ == '__main__':
app.run(host='0.0.0.0')
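
# Illustrative client calls against the two routes above, assuming the server
# runs locally on Flask's default port 5000 and `requests` is installed; the
# payload expected by MainController is not shown here:
#
#   import requests
#   requests.get("http://localhost:5000/ping").text            # -> "pong"
#   resp = requests.post("http://localhost:5000/generate", json={...})
#   open("poster.png", "wb").write(resp.content)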
| 23.411765 | 57 | 0.673367 |
33a49dcb8909ea1581dc4d90f918e7844abada00
| 223 |
py
|
Python
|
WD/Cwiczenia/Kwadraty_w_petli.py
|
galursa/UWM
|
b7ab4a275662764a91af6c5bc79da0d98177d0ac
|
[
"MIT"
] | 1 |
2020-02-29T14:38:33.000Z
|
2020-02-29T14:38:33.000Z
|
WD/Cwiczenia/Kwadraty_w_petli.py
|
galursa/UWM
|
b7ab4a275662764a91af6c5bc79da0d98177d0ac
|
[
"MIT"
] | null | null | null |
WD/Cwiczenia/Kwadraty_w_petli.py
|
galursa/UWM
|
b7ab4a275662764a91af6c5bc79da0d98177d0ac
|
[
"MIT"
] | null | null | null |
# Exercise session 2
# Task 7
ile=input("Podaj ile chcesz wczytać liczb: ")
ile=int(ile)
for i in range(ile):
liczba=input("Podaj liczbę numer "+str(i)+": ")
liczba=int(liczba)
print(str(liczba**2))
| 18.583333 | 52 | 0.609865 |
8938bff55d9a25b0a08b3653bc1b868f9bcab4d6
| 716 |
py
|
Python
|
30_days_of_Code/15_linked_list.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
30_days_of_Code/15_linked_list.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
30_days_of_Code/15_linked_list.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
class Node:
def __init__(self, data):
self.data = data
self.next = None
class Solution:
def display(self, head):
current = head
while current:
print(current.data, end=' ')
current = current.next
def insert(self, head, data):
if head:
cur = head
while cur.next:
cur = cur.next
N = Node(data=data)
cur.next = N
return head
else:
head = Node(data=data)
return head
mylist = Solution()
T = int(input())
head = None
for i in range(T):
    data = int(input())
    head = mylist.insert(head, data)
mylist.display(head)
| 20.457143 | 40 | 0.519553 |
98359bcafb43958d81e407a2ccbc55ca959dfeb4
| 10,932 |
py
|
Python
|
XlsxTools/xls2json/Tools/xls2json.py
|
maplelearC/Unity3DTraining
|
3824d5f92c5fce5cbd8806feb1852e9a99e4a711
|
[
"MIT"
] | 3,914 |
2017-01-20T04:55:53.000Z
|
2022-03-31T18:06:12.000Z
|
XlsxTools/xls2json/Tools/xls2json.py
|
maplelearC/Unity3DTraining
|
3824d5f92c5fce5cbd8806feb1852e9a99e4a711
|
[
"MIT"
] | 5 |
2019-12-17T05:27:58.000Z
|
2022-01-20T11:55:33.000Z
|
XlsxTools/xls2json/Tools/xls2json.py
|
maplelearC/Unity3DTraining
|
3824d5f92c5fce5cbd8806feb1852e9a99e4a711
|
[
"MIT"
] | 1,263 |
2017-01-15T09:54:44.000Z
|
2022-03-31T14:59:11.000Z
|
# -*- coding: utf-8 -*-
import os,sys,importlib
import xml.etree.ElementTree as ET
import xdrlib,xlrd
# Prevent garbled Chinese output
importlib.reload(sys)
# Config file name
CONFIG_NAME = "config.ini"
# Extension of the exported data files
SAVE_FILE_TYPE = ".json"
# Extension of the generated mapping classes
SAVE_MAPPING_TYPE = ".cs"
# Separator character
SPLIT_CAHR = ":"
# Spreadsheet (xls) directory
XLS_PATH = ""
# XML definition directory
XML_PATH = ""
# Export directory
OUT_PATH = ""
# Mapping output directory
MAP_PATH = ""
# Per-table content for the aggregate mapping class
MAPPING_CONTENT = ""
# Read the configuration
def read_config():
print("开始读取配置文件")
config_file = open(CONFIG_NAME, "r", encoding = "utf-8")
    # Spreadsheet path
cur_line = config_file.readline().rstrip("\r\n").split(SPLIT_CAHR)
global XLS_PATH
XLS_PATH = os.path.abspath(cur_line[1])
print("表格路径:", XLS_PATH)
    # XML definition path
cur_line = config_file.readline().rstrip("\r\n").split(SPLIT_CAHR)
global XML_PATH
XML_PATH = os.path.abspath(cur_line[1])
print("解析路径", XML_PATH)
    # Export path
cur_line = config_file.readline().rstrip("\r\n").split(SPLIT_CAHR)
global OUT_PATH
OUT_PATH = os.path.abspath(cur_line[1])
print("导出路径", OUT_PATH)
    # Mapping path
cur_line = config_file.readline().rstrip("\r\n").split(SPLIT_CAHR)
global MAP_PATH
MAP_PATH = os.path.abspath(cur_line[1])
print("映射路径", MAP_PATH)
config_file.close()
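
# For illustration: read_config() above expects four lines of the form
# "<label>:<path>", in the order spreadsheet path, XML path, export path,
# mapping path. A hypothetical config.ini could therefore look like:
#
#   xls:./xls
#   xml:./xml
#   out:./json
#   map:./mapping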
# Delete the old files in the export directory
def delect_old_file():
print("删除导出目录原文件")
file_list = os.listdir(OUT_PATH)
for file in file_list:
        # Only delete JSON files
if file.endswith(SAVE_FILE_TYPE):
os.remove(OUT_PATH + "\\" + file)
print("删除映射目录原文件")
file_list = os.listdir(MAP_PATH)
for file in file_list:
        # Only delete C# files
if file.endswith(SAVE_MAPPING_TYPE):
os.remove(MAP_PATH + "\\" + file)
# Convert the files
def change_file():
print("开始转换文件")
file_list = os.listdir(XML_PATH)
for file in file_list:
if file.endswith(".xml"):
            # Build the XML file path
xml_file_path = XML_PATH + "\\" + file
isSucc = parse_file_by_xml(xml_file_path)
if (False == isSucc):
print("出错了!!!!!!!!!!!!!!!!!!")
return
def parse_file_by_xml(xml_file_path):
    # Parse the XML
try:
tree = ET.parse(xml_file_path)
        # Get the root node
root = tree.getroot()
except Exception as e:
print("解析{0}失败!!!!!!!!!!!!".format(xml_file_path))
sys.exit()
return False
    # Parse the contents
if root.tag == "config":
xls_file_list = []
save_file_name = ""
element_list = []
for child in root:
if child.tag == "input":
                    # Spreadsheets to convert
for input_child in child:
xls_file_list.append(input_child.get("file"))
elif child.tag == "output":
                    # Name of the output file
save_file_name = child.get("name")
elif child.tag == "elements":
                    # Element list for the conversion
element_list = child
        # Convert the data
return change_file_by_xml_data(xls_file_list, element_list, save_file_name)
else:
print("找不到config节点 {0}".format(xml_file_path))
return False
# Start converting the spreadsheets
def change_file_by_xml_data(xls_file_list, element_list, save_file_name):
    # Primary key check
primary_key = None
primary_type = None
for element in element_list:
if "true" == element.get("primary"):
if None == primary_key:
primary_key = element.get("name")
primary_type = element.get("type")
else:
print("存在多个主键")
return False
if None == primary_key:
print("没有主键")
return False
all_value_list = {}
for xls_file in xls_file_list:
xls_file_path = XLS_PATH + "\\" + xls_file
print("转换文件{0}".format(xls_file_path))
        # Open the spreadsheet
xls_data = None
try:
xls_data = xlrd.open_workbook(xls_file_path)
except Exception as e:
print(str(e))
return False
        # Read the data from the first sheet
        xls_table = xls_data.sheets()[0]
        nrows = xls_table.nrows  # number of rows
        ncols = xls_table.ncols  # number of columns
        # Convert the rows according to the definitions from the XML
key_list = xls_table.row_values(0)
for row_index in range(1, nrows):
row_values = xls_table.row_values(row_index)
            # Store the row data in a dictionary
value_dic = {}
for col_index in range(0, ncols):
for element in element_list:
if key_list[col_index] == element.get("key"):
if "int" == element.get("type"):
value_dic[element.get("name")] = int(row_values[col_index])
elif "string" == element.get("type"):
value_dic[element.get("name")] = str(row_values[col_index])
else:
value_dic[element.get("name")] = str(row_values[col_index])
break
            # Set the primary key
primary_value = str(value_dic[primary_key])
if primary_value in all_value_list:
print("存在重复的主键")
return False
all_value_list[primary_value] = value_dic
        # Release resources
xls_data.release_resources()
    # Join everything into a JSON string
    JSON_STR = str(all_value_list).replace("\'", "\"")
    # Build the class name
    file_name = "Table" + save_file_name[0].upper() + save_file_name[1:]
    # Save as a JSON file
    save_to_json(JSON_STR, file_name)
    # Generate the C# mapping class
    save_to_mapping(file_name, element_list, primary_type)
return True
# Save as a JSON file
def save_to_json(str, file_name):
save_file_path = OUT_PATH + "\\" + file_name + SAVE_FILE_TYPE
print("输出文件:" + save_file_path)
file_object = open(save_file_path, 'w', encoding = "utf-8")
file_object.write(str)
file_object.close()
# Generate the C# mapping class
def save_to_mapping(file_name, element_list, primary_type):
table_content_frame = "public class " + file_name + " {{\n{0}{1}\n}}"
table_content_field = ""
constructor_content = ""
constructor_params = None
constructor_assign = None
mapping_single_content = create_single_table_mapping_content(file_name)
mapping_json_value = None
    # Members of the mapping class
for element in element_list:
field_name = element.get("name")
type_str = element.get("type")
field_str = "\n\t//列名[{0}] Type[{1}]\n\tpublic {2} " + field_name + " = {3};\n"
define_value_str = None
if "int" == type_str:
define_value_str = 0
elif "string" == type_str:
define_value_str = "\"\""
if None != type_str:
            # Fill in the field
key_name_str = element.get("key")
table_content_field = table_content_field + field_str.format(key_name_str, type_str, type_str, define_value_str)
if None != constructor_params:
constructor_params = constructor_params + ", " + type_str + " " + field_name
constructor_assign = constructor_assign + "\n\t\tthis.{0} = {1};".format(field_name, field_name)
mapping_json_value = mapping_json_value + (", ({0})json.Value[\"{1}\"]").format(type_str, field_name)
else:
constructor_params = type_str + " " + field_name
constructor_assign = "\t\tthis.{0} = {1};".format(field_name, field_name)
mapping_json_value = "({0})json.Value[\"{1}\"]".format(type_str, field_name)
    # A constructor can be created
if None != constructor_params:
        # Constructor
constructor_content = ("\n\t//构造函数\n\tpublic " + file_name + "({0})\n\t{{\n{1}\n\t}}").format(constructor_params, constructor_assign)
    # Aggregate mapping data
global MAPPING_CONTENT
prime_key_trans = "null"
if "int" == primary_type:
prime_key_trans = "int.Parse(json.Key)"
elif "string" == primary_type:
prime_key_trans = "json.Key"
MAPPING_CONTENT = MAPPING_CONTENT + mapping_single_content.format(prim_key_type = primary_type, prime_key_trans = prime_key_trans, json_value = mapping_json_value)
save_file_path = MAP_PATH + "\\" + file_name + SAVE_MAPPING_TYPE
print("输出映射类:" + save_file_path)
file_object = open(save_file_path, 'w', encoding = "utf-8")
file_object.write(table_content_frame.format(table_content_field, constructor_content))
file_object.close()
# Generate the aggregate-mapping content for a single table
def create_single_table_mapping_content(file_name):
content = ""
content = content + "\n\n\t//{xml_name}"
content = content + "\n\tprivate Dictionary<{{prim_key_type}}, {file_name}> {lower_file_name}Dic = new Dictionary<{{prim_key_type}}, {file_name}>();"
content = content + "\n\t//初始化{xml_name}字典"
content = content + "\n\tprivate void Init{file_name}()"
content = content + "\n\t{{{{"
content = content + "\n\t\tJObject jsonData = JsonManager.GetTableJson(\"{file_name}\");"
content = content + "\n\t\tforeach (var json in jsonData)"
content = content + "\n\t\t{{{{"
content = content + "\n\t\t\t{{prim_key_type}} key = {{prime_key_trans}};"
content = content + "\n\t\t\tvar jsonValue = json.Value;"
content = content + "\n\t\t\t{file_name} value = new {file_name}({{json_value}});"
content = content + "\n\t\t\t{lower_file_name}Dic.Add(key, value);"
content = content + "\n\t\t}}}}"
content = content + "\n\t}}}}"
content = content + "\n\t//通过主键值获取{xml_name}数据"
content = content + "\n\tpublic {file_name} Get{file_name}ByPrimKey({{prim_key_type}} primKey)"
content = content + "\n\t{{{{"
content = content + "\n\t\tif (0 == {lower_file_name}Dic.Count) Init{file_name}();"
content = content + "\n\t\t//获取数据"
content = content + "\n\t\t{file_name} {lower_file_name}Data = null;"
content = content + "\n\t\t{lower_file_name}Dic.TryGetValue(primKey, out {lower_file_name}Data);"
content = content + "\n\t\treturn {lower_file_name}Data;"
content = content + "\n\t}}}}"
return content.format(xml_name = file_name[5:], file_name = file_name, lower_file_name = file_name[0].lower() + file_name[1:])
# Create the aggregate mapping data file
def craete_table_mapping_cs():
mapping_frame = ""
mapping_frame = mapping_frame + "using System.Collections.Generic;"
mapping_frame = mapping_frame + "\nusing Newtonsoft.Json.Linq;"
mapping_frame = mapping_frame + "\n\npublic class TableMapping"
mapping_frame = mapping_frame + "\n{{\n{0}{1}\n}}"
mapping_ins = ""
mapping_ins = mapping_ins + "//单例"
mapping_ins = mapping_ins + "\n\tprivate TableMapping() { }"
mapping_ins = mapping_ins + "\n\tprivate static TableMapping _ins;"
mapping_ins = mapping_ins + "\n\tpublic static TableMapping Ins { get { if (null == _ins) { _ins = new TableMapping(); } return _ins; } }"
    # Save the file
save_file_path = MAP_PATH + "\\TableMappnig" + SAVE_MAPPING_TYPE
file_object = open(save_file_path, 'w', encoding = "utf-8")
file_object.write(mapping_frame.format(mapping_ins, MAPPING_CONTENT))
file_object.close()
def main():
read_config()
delect_old_file()
change_file()
craete_table_mapping_cs()
if __name__ == "__main__":
main()
| 36.198675 | 171 | 0.608214 |
9846d7a929dfc7206aaef3926e2eb249dc72eda7
| 30 |
py
|
Python
|
notebooks/utils/tui/__init__.py
|
k4t0mono/ipln
|
ba71860bc38df52780903f647fb2404c61a6b3f2
|
[
"BSD-2-Clause"
] | 1 |
2021-03-15T11:53:40.000Z
|
2021-03-15T11:53:40.000Z
|
python/progress/__init__.py
|
pedromxavier/cookbook
|
243532f893651c34e70fbba8a52f3f129dbc8dd3
|
[
"MIT"
] | 2 |
2020-03-24T17:06:03.000Z
|
2020-03-31T02:16:40.000Z
|
python/progress/__init__.py
|
pedromxavier/cookbook
|
243532f893651c34e70fbba8a52f3f129dbc8dd3
|
[
"MIT"
] | null | null | null |
from .progress import Progress
| 30 | 30 | 0.866667 |
120b99a0975e33345360483fef0a78b371c52141
| 2,633 |
py
|
Python
|
ai/split-chinese/jieba-base.py
|
veaba/ncov
|
6019f6b90761fd39363f8a7182ffcee22b9cb7ed
|
[
"MIT"
] | 288 |
2020-01-21T06:12:03.000Z
|
2022-01-16T08:03:13.000Z
|
ai/split-chinese/jieba-base.py
|
veaba/ncov
|
6019f6b90761fd39363f8a7182ffcee22b9cb7ed
|
[
"MIT"
] | 26 |
2020-01-20T05:07:31.000Z
|
2022-03-12T00:24:56.000Z
|
ai/split-chinese/jieba-base.py
|
veaba/ncov
|
6019f6b90761fd39363f8a7182ffcee22b9cb7ed
|
[
"MIT"
] | 48 |
2020-01-22T09:05:59.000Z
|
2022-01-16T08:03:11.000Z
|
# encoding=utf-8
import jieba.posseg as psg
# string="来到北京清华大学"
string ="""
4月12日0—24时,31个省(自治区、直辖市)和新疆生产建设兵团报告新增确诊病例108例,其中98例为境外输入病例,10例为本土病例(黑龙江7例,广东3例);新增死亡病例2例(湖北2例);新增疑似病例6例,均为境外输入病例(黑龙江4例,上海2例)。
当日新增治愈出院病例88例,解除医学观察的密切接触者1092人,重症病例减少18例。
境外输入现有确诊病例867例(含重症病例38例),现有疑似病例72例。累计确诊病例1378例,累计治愈出院病例511例,无死亡病例。
截至4月12日24时,据31个省(自治区、直辖市)和新疆生产建设兵团报告,现有确诊病例1156例(其中重症病例121例),累计治愈出院病例77663例,累计死亡病例3341例,累计报告确诊病例82160例,现有疑似病例72例。累计追踪到密切接触者719908人,尚在医学观察的密切接触者9655人。
湖北无新增确诊病例,新增治愈出院病例57例(武汉57例),新增死亡病例2例(武汉2例),现有确诊病例244例(武汉243例),其中重症病例75例(武汉74例)。累计治愈出院病例64338例(武汉47186例),累计死亡病例3221例(武汉2579例),累计确诊病例67803例(武汉50008例)。无新增疑似病例,无现有疑似病例。
31个省(自治区、直辖市)和新疆生产建设兵团报告新增无症状感染者61例,其中境外输入无症状感染者12例;当日转为确诊病例28例(境外输入28例);当日解除医学观察55例(境外输入9例);尚在医学观察无症状感染者1064例(境外输入307例)。
累计收到港澳台地区通报确诊病例1437例:香港特别行政区1004例(出院360例,死亡4例),澳门特别行政区45例(出院13例),台湾地区388例(出院109例,死亡6例)。
"""
seg_list=psg.cut(string)
# print("精确模式===>","| ".join(seg_list))
# Result ==>
"""
精确模式===>
| | 4| 月| 12| 日| 0| —| 24| 时| ,| 31| 个省| (| 自治区| 、| 直辖市| )| 和| 新疆生产建设兵团| 报告| 新增| 确诊| 病例| 108| 例| ,| 其中| 98| 例为| 境外| 输入| 病例| ,| 10| 例为| 本土| 病例| (| 黑龙江| 7| 例| ,| 广东| 3| 例| )|
;| 新增| 死亡| 病例| 2| 例| (| 湖北| 2| 例| )| ;| 新增| 疑似病例| 6| 例| ,| 均| 为| 境外| 输入| 病例| (| 黑龙江| 4| 例| ,| 上海| 2| 例| )| 。|
| | | 当日| 新增| 治愈| 出院| 病例| 88| 例| ,| 解除| 医学观察| 的| 密切接触| 者| 1092| 人| ,| 重症| 病例| 减少| 18| 例| 。|
| | | 境外| 输入| 现有| 确诊| 病例| 867| 例| (| 含| 重症| 病例| 38| 例| )| ,| 现有| 疑似病例| 72| 例| 。| 累计| 确诊| 病例| 1378| 例| ,| 累计| 治愈| 出院| 病例| 511| 例| ,| 无| 死亡| 病例| 。|
| | | 截至| 4| 月| 12| 日| 24| 时| ,| 据| 31| 个省| (| 自治区| 、| 直辖市| )| 和| 新疆生产建设兵团| 报告| ,| 现有| 确诊| 病例| 1156| 例| (| 其中| 重症| 病例| 121| 例| )| ,| 累计| 治愈| 出院| 病例| 77663| 例| ,| 累计| 死亡| 病
例| 3341| 例| ,| 累计| 报告| 确诊| 病例| 82160| 例| ,| 现有| 疑似病例| 72| 例| 。| 累计| 追踪| 到| 密切接触| 者| 719908| 人| ,| 尚| 在| 医学观察| 的| 密切接触| 者| 9655| 人| 。|
| | | 湖北| 无| 新增| 确诊| 病例| ,| 新增| 治愈| 出院| 病例| 57| 例| (| 武汉| 57| 例| )| ,| 新增| 死亡| 病例| 2| 例| (| 武汉| 2| 例| )| ,| 现有| 确诊| 病例| 244| 例| (| 武汉| 243| 例| )| ,| 其中| 重症| 病例| 75| 例| (| 武汉
| 74| 例| )| 。| 累计| 治愈| 出院| 病例| 64338| 例| (| 武汉| 47186| 例| )| ,| 累计| 死亡| 病例| 3221| 例| (| 武汉| 2579| 例| )| ,| 累计| 确诊| 病例| 67803| 例| (| 武汉| 50008| 例| )| 。| 无| 新增| 疑似病例| ,| 无| 现有| 疑似病例
| 。|
| | | 31| 个省| (| 自治区| 、| 直辖市| )| 和| 新疆生产建设兵团| 报告| 新增| 无症状| 感染者| 61| 例| ,| 其中| 境外| 输入| 无症状| 感染者| 12| 例| ;| 当日| 转为| 确诊| 病例| 28| 例| (| 境外| 输入| 28| 例| )| ;| 当日| 解除| 医学观
察| 55| 例| (| 境外| 输入| 9| 例| )| ;| 尚| 在| 医学观察| 无症状| 感染者| 1064| 例| (| 境外| 输入| 307| 例| )| 。|
| | | 累计| 收到| 港澳台地区| 通报| 确诊| 病例| 1437| 例| :| 香港特别行政区| 1004| 例| (| 出院| 360| 例| ,| 死亡| 4| 例| )| ,| 澳门特别行政区| 45| 例| (| 出院| 13| 例| )| ,| 台湾地区| 388| 例| (| 出院| 109| 例| ,| 死亡| 6|
例| )| 。|
|
"""
for i in seg_list:
print("==>",type(i),i)
| 65.825 | 179 | 0.550703 |
89d9e48927ec828fd9208dc357f86ea67a28a09c
| 793 |
py
|
Python
|
scripts/add_come.py
|
belamu/kanthaus.online
|
de84010a77e60156cbefb8e014ac6290540ded69
|
[
"CC0-1.0",
"MIT"
] | 6 |
2018-09-03T15:48:19.000Z
|
2021-09-27T12:04:04.000Z
|
scripts/add_come.py
|
belamu/kanthaus.online
|
de84010a77e60156cbefb8e014ac6290540ded69
|
[
"CC0-1.0",
"MIT"
] | 13 |
2017-12-25T20:44:37.000Z
|
2020-10-30T09:37:10.000Z
|
scripts/add_come.py
|
belamu/kanthaus.online
|
de84010a77e60156cbefb8e014ac6290540ded69
|
[
"CC0-1.0",
"MIT"
] | 14 |
2018-01-05T19:54:40.000Z
|
2021-03-24T10:16:31.000Z
|
#!/usr/bin/env python3
# Usage: ./scripts/add_come.py
url = "https://codi.kanthaus.online/come/download"
import urllib.request
with urllib.request.urlopen(url) as response:
markdown = response.read().decode()
import yaml
parts = markdown.split('---')
frontmatter = parts[1]
frontmatter = yaml.safe_load(frontmatter)
date = frontmatter['date']
destination_directory = 'user/pages/40.governance/90.minutes/{}_CoMe'.format(date)
import os
import sys
if os.path.isdir(destination_directory):
print(destination_directory, 'already exists! Exiting...')
sys.exit(1)
os.mkdir(destination_directory)
destination_file = os.path.join(destination_directory, 'item.md')
with open(destination_file, 'w+') as f:
f.write(markdown)
print('Done! Type `git status` to see the changes!')
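
# For illustration: the pad downloaded above is expected to start with a YAML
# front matter block delimited by '---' and containing at least a 'date' key,
# roughly like:
#
#   ---
#   date: 2021-01-04
#   ---
#   # CoMe minutes ...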
| 27.344828 | 82 | 0.741488 |
143a71f0538e957183a42d1dc096bccb0dd0d05c
| 345 |
py
|
Python
|
Problems/Two Pointers/easy/MergeStringAlternately/test_merge_string_alternately.py
|
dolong2110/Algorithm-By-Problems-Python
|
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
|
[
"MIT"
] | 1 |
2021-08-16T14:52:05.000Z
|
2021-08-16T14:52:05.000Z
|
Problems/Two Pointers/easy/MergeStringAlternately/test_merge_string_alternately.py
|
dolong2110/Algorithm-By-Problems-Python
|
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
|
[
"MIT"
] | null | null | null |
Problems/Two Pointers/easy/MergeStringAlternately/test_merge_string_alternately.py
|
dolong2110/Algorithm-By-Problems-Python
|
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from merge_string_alternately import mergeAlternately
class Test(TestCase):
def test_merge_alternately(self):
self.assertEqual(mergeAlternately("abc", "pqr"), "apbqcr")
self.assertEqual(mergeAlternately("ab", "pqrs"), "apbqrs")
self.assertEqual(mergeAlternately("abcd", "pq"), "apbqcd")
| 43.125 | 66 | 0.727536 |
1ae63296d699c65a0bd047816b0e603a4cff99eb
| 5,841 |
py
|
Python
|
hplc_analysis.py
|
furubayashim/hplc-analysis-hitachi
|
d8f2b594b577032548e860b424f55241bbe72b37
|
[
"MIT"
] | null | null | null |
hplc_analysis.py
|
furubayashim/hplc-analysis-hitachi
|
d8f2b594b577032548e860b424f55241bbe72b37
|
[
"MIT"
] | null | null | null |
hplc_analysis.py
|
furubayashim/hplc-analysis-hitachi
|
d8f2b594b577032548e860b424f55241bbe72b37
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# script to draw PDA chromatogram & spectrum figure using Hitachi HPLC Chromaster stx/ctx files
import pandas as pd
import numpy as np
import glob
import os
import sys
import matplotlib.pyplot as plt
# change this if using different user/folder
data_dir = "raw/"
# can give sample name file as argv
if len(sys.argv) >1:
samplenamefile = sys.argv[1]
else:
samplenamefile = 'sampletable.xlsx'
sample_df = pd.read_excel(samplenamefile)
### load parameter from the xls file ####################################
sample_nos = [str(s) for s in sample_df['sample no'].values]
sample_names = sample_df['name'].values
sample_dir = sorted([f+'/' for f in os.listdir(data_dir) if not os.path.isfile(f)],key=lambda x:int(x[:-1]))
# Time range (x axis)
start_time = 2
end_time = 18
if 'start time' in sample_df.columns:
start_time = sample_df['start time'].values[0]
if 'end time' in sample_df.columns:
end_time = sample_df['end time'].values[0]
# which chart to draw
all_chromato = sample_df['all chromato'].values[0]
each_data = sample_df['each data'].values[0]
# output folder and name
if not os.path.exists('processed'): os.mkdir('processed')
output_name = 'all_chromato'
if 'output name' in sample_df.columns:
output_name = sample_df['output name'].values[0]
### draw chromato for all samples in one fig ############################
if all_chromato == 'y':
    ctx_files = sorted(glob.glob(data_dir+'*/*.ctx'),key=lambda x: (int(x.split('/')[1]),int(x.split('/')[2][:-4])))  # messy: sorts by sample folder number, then by file number
chromato_dfs = [pd.read_csv(file,skiprows=38,delimiter=';',header=None,names=[sample_names[n],'NaN']).iloc[:,:1] for n,file in enumerate(ctx_files)]
chromato_df = pd.concat(chromato_dfs,axis=1)
chromato_df_cut = chromato_df.loc[start_time:end_time]
fig,axes = plt.subplots(1,2,figsize=[10,8])
for n,(name,col) in enumerate(chromato_df_cut.iteritems()):
time = chromato_df_cut.index.values
abs = col.values - 0.1 * n
axes[0].plot(time,abs,label=name)
axes[0].legend()
axes[0].set_ylabel('Absorbance')
axes[0].set_xlabel('Time (min)')
#axes[0].set_ylim([-0.45,0.1])
axes[0].set_xlim([start_time,end_time])
axes[0].set_title('Height as it is')
for n,(name,col) in enumerate(chromato_df_cut.iteritems()):
abs = col.values / np.nanmax(col.values) - 1.1 * n
time = chromato_df_cut.index.values
axes[1].plot(time,abs,label=name)
axes[1].legend()
axes[1].set_ylabel('Absorbance (Normalized)')
axes[1].set_xlabel('Time (min)')
#axes[1].set_ylim([-0.45,1])
axes[1].set_xlim([start_time,end_time])
axes[1].set_title('Height Normalized')
plt.savefig("processed/{}.pdf".format(output_name),bbox_inches = "tight");
### draw chromato/spec for each sample ############################
if each_data == 'y':
for sample_no,sample_name,sample_dir in zip(sample_nos,sample_names,sample_dir):
        # load chromato files; several ctx files can be imported
ctx_files = sorted(glob.glob(data_dir+sample_dir+'*.ctx'))
chromato_dfs = [pd.read_csv(file,skiprows=38,delimiter=';',header=None,names=[os.path.basename(file)[:-4],'NaN']).iloc[:,:1] for file in ctx_files]
chromato_df = pd.concat(chromato_dfs,axis=1)
if chromato_df.index.min() < start_time:
chromato_df_cut = chromato_df.loc[start_time:]
else:
chromato_df_cut = chromato_df
if chromato_df_cut.index.max() > end_time:
chromato_df_cut = chromato_df_cut.loc[:end_time]
# load stx files
stx_files = sorted(glob.glob(data_dir+sample_dir+'*.stx'),key=lambda x: float(os.path.basename(x[:-4])))
stx_dfs = [pd.read_csv(f,delimiter=';',skiprows=44).iloc[:,:1] for f in stx_files]
stx_df = pd.concat(stx_dfs,axis=1)
# stx_df is the dataframe of the abs spectrum of each peak.
# index = 200-650 (nm)
# column name = str of time (min)
stx_df_cut = stx_df.loc[250:600] # select 250-600 nm
# draw figure
fig = plt.figure(figsize=[6,16])
# draw chromatogram
ymax = 0
ymin = 0
for name,col in chromato_df_cut.iteritems():
time = chromato_df_cut.index.values
abs = col.values
plt.subplot(6,1,1)
#109: MatplotlibDeprecationWarning: Adding an axes using the same arguments as a previous axes currently reuses the earlier instance. In a future version, a new instance will always be created and returned. Meanwhile, this warning can be suppressed, and the future behavior ensured, by passing a unique label to each axes instance.
plt.plot(time,abs,label=name)
ymaxtemp = chromato_df.loc[start_time:end_time,name].values.max()
ymintemp = chromato_df.loc[start_time:end_time,name].values.min()
if ymaxtemp > ymax: ymax = ymaxtemp
if ymintemp < ymin: ymin = ymintemp
plt.legend()
plt.xticks(np.arange(start_time,end_time,1))
plt.xlabel('Time (min)')
plt.ylabel('Absorbance')
plt.ylim([ymin + ymin*0.05,ymax + ymax*0.05])
plt.title(sample_no + '-' + sample_name)
# draw abs spectrum
for n,(rt,series) in enumerate(stx_df_cut.iteritems()):
wavelength = series.index.values
absorbance = series.values
abs_max = str(int(series.idxmax()))
plt.subplot(12,3,7+n)
plt.plot(wavelength,absorbance,label=rt)
plt.xlim([250,600])
plt.xticks(np.arange(300,700,100))
plt.ylim([series.min(),series.max()])
plt.title('{} min (λmax: {} nm)'.format(rt[:-2],abs_max))
plt.tight_layout(pad=-0.1);
plt.savefig('processed/'+sample_no+'-'+sample_name+'.pdf',bbox_inches = "tight");
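
# For illustration: the sample table read at the top (default 'sampletable.xlsx')
# is expected to provide the columns used above, roughly:
#
#   sample no | name    | all chromato | each data | start time | end time | output name
#   1         | sampleA | y            | y         | 2          | 18       | all_chromato
#   2         | sampleB | y            | y         | 2          | 18       | all_chromato
#
# 'start time', 'end time' and 'output name' are optional and fall back to the
# defaults defined above.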
| 41.721429 | 349 | 0.640644 |
2ee382ef7e4e52fef0695c81d210f8710fbaae22
| 26,869 |
py
|
Python
|
kiosk/models.py
|
AndiBr/ffksk
|
ff4bc4ad26d4571eaa1a6ff815b2e6a876f8ba99
|
[
"MIT"
] | null | null | null |
kiosk/models.py
|
AndiBr/ffksk
|
ff4bc4ad26d4571eaa1a6ff815b2e6a876f8ba99
|
[
"MIT"
] | 14 |
2018-09-12T06:59:55.000Z
|
2020-02-26T07:17:48.000Z
|
kiosk/models.py
|
AndiBr/ffksk
|
ff4bc4ad26d4571eaa1a6ff815b2e6a876f8ba99
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.utils import timezone
from dateutil import tz
import pytz
from datetime import date
from django.core.validators import MinValueValidator
from django.db import transaction
from profil.models import KioskUser
from django.db import connection
from django.conf import settings
from django.template.loader import render_to_string
from .queries import readFromDatabase
from django.db.models import Max
# Create your models here.
class Start_News(models.Model):
heading = models.CharField(max_length=256)
date = models.DateTimeField(default=timezone.now)
content = models.TextField(max_length=2048, blank=True)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
starred = models.BooleanField(default=False)
visible = models.BooleanField(default=True)
def __str__(self):
return(str(self.date) + ': ' + str(self.heading))
class Kontakt_Nachricht(models.Model):
name = models.CharField(max_length=40)
email = models.EmailField('E-Mail-Adresse')
gesendet = models.DateTimeField(auto_now_add=True)
betreff = models.TextField(max_length=128, blank=True)
text = models.TextField(max_length=1024)
beantwortet = models.BooleanField(default=False)
def __str__(self):
return ('Von: ' + str(self.name) + ': '+str(self.betreff))
class Produktpalette(models.Model):
produktName = models.CharField(max_length=40)
imVerkauf = models.BooleanField()
inAufstockung = models.BooleanField(default=True)
produktErstellt = models.DateTimeField(auto_now_add=True)
produktGeaendert = models.DateTimeField(auto_now=True)
#kommentar = models.TextField(max_length=512,blank=True)
farbeFuerPlot = models.TextField(max_length=7,blank=True)
def __str__(self):
return ('ID ' + str(self.id) + ': ' + self.produktName)
class Produktkommentar(models.Model):
produktpalette = models.ForeignKey(Produktpalette, on_delete=models.CASCADE)
erstellt = models.DateTimeField(auto_now_add=timezone.now)
kommentar = models.TextField(max_length=512,blank=True)
def __str__(self):
return (self.produktpalette.produktName + ' (' + str(self.erstellt) + ' )')
class Kioskkapazitaet(models.Model):
produktpalette = models.OneToOneField(
Produktpalette,on_delete=models.CASCADE
,primary_key=True)
maxKapazitaet = models.IntegerField(validators=[MinValueValidator(0)])
schwelleMeldung = models.IntegerField(validators=[MinValueValidator(0)])
paketgroesseInListe = models.IntegerField(validators=[MinValueValidator(0)])
def __str__(self):
return(self.produktpalette.produktName +
", Kapazit"+chr(228)+"t: " + str(self.maxKapazitaet))
class ProduktVerkaufspreise(models.Model):
produktpalette = models.ForeignKey(
Produktpalette, on_delete=models.CASCADE)
verkaufspreis = models.IntegerField(validators=[MinValueValidator(0)])
preisAufstockung = models.IntegerField(validators=[MinValueValidator(0)])
gueltigAb = models.DateTimeField(default=timezone.now)
def __str__(self):
price = '%.2f' % (self.verkaufspreis/100)
aufstockung = '%.2f' % (self.preisAufstockung/100)
return(self.produktpalette.produktName + ", " +
str(price) + "(+"+str(aufstockung)+") "+chr(8364)+" g"+chr(252)+"ltig ab " + str(self.gueltigAb))
def getActPrices(produkt_id):
verkaufspreis = readFromDatabase('getActPrices',[produkt_id])
return(verkaufspreis[0])
def getPreisAufstockung(produkt_id):
aufstockung = readFromDatabase('getPreisAufstockung',[produkt_id])
return(aufstockung[0])
class Einkaufsliste(models.Model):
kiosk_ID = models.AutoField(primary_key=True)
produktpalette = models.ForeignKey(
Produktpalette,on_delete=models.CASCADE)
bedarfErstelltUm = models.DateTimeField(auto_now_add=timezone.now)
def __str__(self):
return("[#" + str(self.kiosk_ID) + "] " +
self.produktpalette.produktName + ", Bedarf angemeldet um " +
str(self.bedarfErstelltUm))
def getEinkaufsliste():
einkaufsliste = readFromDatabase('getEinkaufsliste')
return(einkaufsliste)
    # A group from the shopping list is earmarked for purchase
@transaction.atomic
def einkaufGroupVormerken(ekGroupID,user):
        # Find the groups in EinkaufslisteGroups, then the corresponding IDs in Einkaufsliste
groupEntries = EinkaufslisteGroups.objects.filter(gruppenID=ekGroupID)
for grEntry in groupEntries:
grEntryID = grEntry.einkaufslistenItem_id
ekItem = Einkaufsliste.objects.get(kiosk_ID=grEntryID)
vg = ZumEinkaufVorgemerkt(kiosk_ID=ekItem.kiosk_ID, bedarfErstelltUm=ekItem.bedarfErstelltUm,
produktpalette_id=ekItem.produktpalette_id, einkaufsvermerkUm=timezone.now(),
einkaeufer_id = user)
vg.save()
Einkaufsliste.objects.get(kiosk_ID=grEntryID).delete()
EinkaufslisteGroups.objects.filter(gruppenID=ekGroupID).delete()
return True
def getCommentsOnProducts(ekGroupID):
        # Return the comments on all products
comments = readFromDatabase('getCommentsOnProductsInEkList',[ekGroupID])
return comments
class EinkaufslisteGroups(models.Model):
einkaufslistenItem = models.OneToOneField(Einkaufsliste, to_field='kiosk_ID', on_delete=models.CASCADE)
gruppenID = models.IntegerField()
def __str__(self):
return("Element: [#" + str(self.einkaufslistenItem.kiosk_ID) + "] Gruppe " +
str(self.gruppenID))
class ZumEinkaufVorgemerkt(models.Model):
kiosk_ID = models.AutoField(primary_key=True)
produktpalette = models.ForeignKey(
Produktpalette,on_delete=models.CASCADE)
bedarfErstelltUm = models.DateTimeField()
einkaufsvermerkUm = models.DateTimeField(auto_now_add=timezone.now)
einkaeufer = models.ForeignKey(
KioskUser,on_delete=models.CASCADE)
def __str__(self):
return("[#" + str(self.kiosk_ID) + "] " +
self.produktpalette.produktName + ", vorgemerkt um " +
str(self.einkaufsvermerkUm) + ", von " + str(self.einkaeufer))
def getMyZumEinkaufVorgemerkt(currentUserID):
persEinkaufsliste = readFromDatabase('getMyZumEinkaufVorgemerkt',[currentUserID])
return(persEinkaufsliste)
@transaction.atomic
def einkaufAnnehmen(form, currentUser):
retVal = {'product_id': None, 'err': False, 'msg': None, 'html': None, 'dct': None, 'angeliefert': None}
finanz = getattr(settings,'FINANZ')
product_id= form['product_id']
product = Produktpalette.objects.get(id=product_id)
retVal['product_id'] = product_id
prodVkPreis = ProduktVerkaufspreise.getActPrices(product_id)
prodVkPreis = prodVkPreis.get('verkaufspreis')
retVal['err'] = False
userID = form['userID']
anzahlAngeliefert = form['anzahlAngeliefert']
gesPreis = form['gesPreis']
# Get the maximal number of products to accept
persEkList = ZumEinkaufVorgemerkt.getMyZumEinkaufVorgemerkt(userID)
anzahlElemente = [x['anzahlElemente'] for x in persEkList if x['id']==product_id][0]
        # Check that no more was purchased than was on the list
if anzahlAngeliefert > anzahlElemente:
retVal['msg'] = "Die Menge der angelieferten Ware ist zu gro"+chr(223)+" f"+chr(252)+"r '"+product.produktName+"'"
retVal['err'] = True
        # Check that the cost is low enough to leave a margin of 10 % between purchase and sale price.
minProduktMarge = finanz['minProduktMarge']
if float(gesPreis) > float(anzahlAngeliefert) * (1-float(minProduktMarge)) * float(prodVkPreis):
retVal['msg'] = "Die Kosten f"+chr(252)+"r den Einkauf von '"+product.produktName+"' sind zu hoch. Der Einkauf kann nicht angenommen werden."
retVal['err'] = True
if retVal['err'] == True:
            # On invalid input, return an alert message saying that the input is wrong
retVal['html'] = render_to_string('kiosk/fehler_message.html', {'message':retVal['msg']})
return retVal
            # Ideally resolve the <form> here and build it manually, using POST like GET above; the token must be included in the data passed from JavaScript.
else:
            # If the input is valid, the purchase price is calculated, written to the products, and the products are placed in the kiosk. Money is transferred from the bank to the buyer
            # Calculate the purchase price
prodEkPreis = int(gesPreis / anzahlAngeliefert)
datum = timezone.now()
angeliefert = ZumEinkaufVorgemerkt.objects.filter(einkaeufer__id=userID,
produktpalette__id=product_id).order_by('kiosk_ID')[:anzahlAngeliefert]
if len(angeliefert) != anzahlAngeliefert:
raise ValueError
            # Record the values and write them into the kiosk
for an in angeliefert:
k = Kiosk(kiosk_ID=an.kiosk_ID,bedarfErstelltUm=an.bedarfErstelltUm,
produktpalette_id=an.produktpalette_id, einkaufsvermerkUm=an.einkaufsvermerkUm,
einkaeufer_id = an.einkaeufer_id, geliefertUm = datum,
verwalterEinpflegen_id = currentUser.id, einkaufspreis = prodEkPreis)
                # Make sure a value with two decimal places is stored!
k.save()
an.delete()
            # Calculate the profit and the total amount
gewinnEK = finanz['gewinnEK']
provision = int(((float(prodVkPreis) * float(anzahlAngeliefert)) - float(gesPreis)) * float(gewinnEK))
paidPrice = gesPreis
gesPreis = gesPreis + provision
            # Money transfer from the bank to the buyer
userBank = KioskUser.objects.get(username='Bank')
userAnlieferer = KioskUser.objects.get(id=userID)
GeldTransaktionen.doTransaction(userBank,userAnlieferer,gesPreis,datum,
"Erstattung Einkauf " + product.produktName + " (" + str(anzahlAngeliefert) + "x)" )#" um " + str(datum.astimezone(tz.tzlocal())))
            # Make sure a value with two decimal places is stored!
retVal['dct'] = {'gesPreis':gesPreis/100,'userAnlieferer':userAnlieferer.username, 'produktName': product.produktName,'anzahlElemente':anzahlElemente}
retVal['angeliefert'] = angeliefert
retVal['msg'] = "Vom Produkt '"+str(product.produktName)+"' wurden "+str(anzahlAngeliefert)+' St'+chr(252)+'ck zum Preis von '+'%.2f'%(paidPrice/100)+' '+chr(8364)+' angeliefert.'
retVal['html'] = render_to_string('kiosk/success_message.html', {'message':retVal['msg']})
return retVal
class Kiosk(models.Model):
kiosk_ID = models.AutoField(primary_key=True)
produktpalette = models.ForeignKey(
Produktpalette,on_delete=models.CASCADE)
bedarfErstelltUm = models.DateTimeField()
einkaufsvermerkUm = models.DateTimeField()
einkaeufer = models.ForeignKey(
KioskUser,on_delete=models.CASCADE,related_name='kiosk_einkaeufer')
geliefertUm = models.DateTimeField(auto_now_add=timezone.now)
verwalterEinpflegen = models.ForeignKey(
KioskUser,on_delete=models.CASCADE,related_name='kiosk_verwalter')
einkaufspreis = models.IntegerField(validators=[MinValueValidator(0)])
def __str__(self):
price = '%.2f' % (self.einkaufspreis/100)
return("[#" + str(self.kiosk_ID) + "] " +
self.produktpalette.produktName + ", EK: " +
str(price) + " "+chr(8364)+", um " +
str(self.geliefertUm) + ', von ' + str(self.einkaeufer) + ' (' + str(self.verwalterEinpflegen) + ')')
def getKioskContent():
kioskItems = readFromDatabase('getKioskContent')
return(kioskItems)
def getKioskContentForInventory():
kioskItems = readFromDatabase('getKioskContentForInventory')
return(kioskItems)
    # Purchase of a product on 'kauf_page'
@transaction.atomic
def buyItem(wannaBuyItem,user,gekauft_per='ubk', buyAndDonate=False):
retVals = {'success': False, 'msg': [], 'product': wannaBuyItem, 'price': 0, 'hasDonated': False, 'donation': 0}
# First, look in Kiosk.
try:
item = Kiosk.objects.filter(produktpalette__produktName=wannaBuyItem)[:1].get()
foundInKiosk = True
except:
msg = 'Selected item is not in Kiosk anymore. But let\'s look into the bought items of "Dieb" ...'
print(msg)
retVals['msg'].append(msg)
foundInKiosk = False
# If not available in Kiosk, do Rueckbuchung from Dieb
if not foundInKiosk:
try:
itemBoughtByDieb = Gekauft.objects.filter(kaeufer__username='Dieb',produktpalette__produktName=wannaBuyItem)[:1].get()
except:
msg = 'No selecetd item has been found in the whole Kiosk to be bought.'
print(msg)
retVals['msg'].append(msg)
return retVals
# Book back the item from Dieb
dieb = KioskUser.objects.get(username='Dieb')
item = Gekauft.rueckbuchenOhneForm(dieb.id, itemBoughtByDieb.produktpalette.id, 1)
foundInKiosk = True
        # Query the current sales price for the item
actPrices = ProduktVerkaufspreise.getActPrices(item.produktpalette.id)
actPrices = actPrices.get('verkaufspreis')
donation = ProduktVerkaufspreise.getPreisAufstockung(item.produktpalette.id)
donation = donation.get('preisAufstockung')
# Check if user is allowed to buy something and has enough money
allowedConusmers = readFromDatabase('getUsersToConsume')
if user.id not in [x['id'] for x in allowedConusmers] and not user.username=='Dieb':
msg = 'Du bist nicht berechtigt, Produkte zu kaufen.'
print(msg)
retVals['msg'].append(msg)
return retVals
if not user.username=='Dieb':
konto = Kontostand.objects.get(nutzer = user)
if buyAndDonate:
if konto.stand - actPrices - donation < 0:
msg = 'Dein Kontostand ist zu niedrig, um dieses Produkt zu kaufen und eine Spende zu geben.'
print(msg)
retVals['msg'].append(msg)
return retVals
else:
if konto.stand - actPrices < 0:
msg = 'Dein Kontostand ist zu niedrig, um dieses Produkt zu kaufen.'
print(msg)
retVals['msg'].append(msg)
return retVals
        # Record the purchase in the 'Gekauft' table
g = Gekauft(kiosk_ID=item.kiosk_ID, produktpalette=item.produktpalette,
bedarfErstelltUm=item.bedarfErstelltUm, einkaufsvermerkUm=item.einkaufsvermerkUm,
einkaeufer=item.einkaeufer, geliefertUm=item.geliefertUm,
verwalterEinpflegen=item.verwalterEinpflegen, einkaufspreis=item.einkaufspreis,
gekauftUm = timezone.now(), kaeufer = user, verkaufspreis=actPrices, gekauft_per=gekauft_per)
        # Delete the product from the 'Kiosk' table
Kiosk.objects.get(kiosk_ID=item.pk).delete()
        # Automatic money transaction from the user to the bank
userBank = KioskUser.objects.get(username='Bank')
GeldTransaktionen.doTransaction(g.kaeufer,userBank,g.verkaufspreis,g.gekauftUm,
"Kauf " + g.produktpalette.produktName)# + " um " + str(g.gekauftUm.astimezone(tz.tzlocal())))
if buyAndDonate and donation>0:
userSpendenkonto = KioskUser.objects.get(username='Spendenkonto')
GeldTransaktionen.doTransaction(
g.kaeufer,
userSpendenkonto,
donation,
g.gekauftUm,
"Spende durch Aufstockung von " + g.produktpalette.produktName)
g.save()
retVals['success'] = True
retVals['msg'].append('OK')
retVals['price'] = actPrices/100.0
retVals['hasDonated'] = buyAndDonate and donation>0
retVals['donation'] = donation/100.0
return retVals
class Gekauft(models.Model):
kiosk_ID = models.AutoField(primary_key=True)
produktpalette = models.ForeignKey(
Produktpalette,on_delete=models.CASCADE)
bedarfErstelltUm = models.DateTimeField()
einkaufsvermerkUm = models.DateTimeField()
einkaeufer = models.ForeignKey(
KioskUser,on_delete=models.CASCADE,related_name='gekauft_einkaeufer')
geliefertUm = models.DateTimeField()
verwalterEinpflegen = models.ForeignKey(
KioskUser,on_delete=models.CASCADE,related_name='gekauft_verwalter')
einkaufspreis = models.IntegerField(validators=[MinValueValidator(0)])
gekauftUm = models.DateTimeField(auto_now_add=timezone.now)
kaeufer = models.ForeignKey(
KioskUser,on_delete=models.CASCADE,related_name='gekauft_kaeufer')
    # The sales price is not strictly necessary (it follows from the relations), but the documentation tables should store this information as a safeguard (protection against deletions in other tables).
verkaufspreis = models.IntegerField(validators=[MinValueValidator(0)])
kaufarten = (('slack','slack'),('web','web'),('ubk','unbekannt'),('dieb','dieb'))
gekauft_per = models.CharField(max_length=6,default='ubk',choices=kaufarten)
def __str__(self):
price = '%.2f' % (self.verkaufspreis/100)
return("[#" + str(self.kiosk_ID) + "] " +
self.produktpalette.produktName + ", VK: " +
str(price) + " "+chr(8364)+", gekauft von " +
str(self.kaeufer) + " um " + str(self.gekauftUm))
@transaction.atomic
def rueckbuchenOhneForm(userID,productID,anzahlZurueck):
dR = doRueckbuchung(userID,productID,anzahlZurueck)
return dR['item']
@transaction.atomic
def rueckbuchen(form):
userID = form.cleaned_data['kaeufer_id']
productID = form.cleaned_data['produkt_id']
anzahlZurueck = form.cleaned_data['anzahl_zurueck']
dR = doRueckbuchung(userID,productID,anzahlZurueck)
price = dR['price']
        # Fetch the kiosk contents
kioskItems = Kiosk.getKioskContent()
        # Query the shopping list
einkaufsliste = Einkaufsliste.getEinkaufsliste()
product = Produktpalette.objects.get(id=productID)
return {'userID':userID, 'anzahlZurueck': anzahlZurueck, 'price': price/100.0, 'product': product.produktName}
def doRueckbuchung(userID,productID,anzahlZurueck):
productsToMove = Gekauft.objects.filter(kaeufer__id=userID, produktpalette__id=productID).order_by('-gekauftUm')[:anzahlZurueck]
price = 0
newKioskItem = None
for item in productsToMove:
k = Kiosk(kiosk_ID=item.kiosk_ID, produktpalette=item.produktpalette,
bedarfErstelltUm=item.bedarfErstelltUm, einkaufsvermerkUm=item.einkaufsvermerkUm,
einkaeufer=item.einkaeufer, geliefertUm=item.geliefertUm,
verwalterEinpflegen=item.verwalterEinpflegen, einkaufspreis=item.einkaufspreis)
k.save()
k.geliefertUm = item.geliefertUm
k.save()
# Only the last item is taken!!
price = price + item.verkaufspreis
newKioskItem = k
userBank = KioskUser.objects.get(username='Bank')
user = KioskUser.objects.get(id=userID)
GeldTransaktionen.doTransaction(userBank,user,item.verkaufspreis,timezone.now,
"R"+chr(252)+"ckbuchung Kauf von " + item.produktpalette.produktName)
item.delete()
return {'price':price, 'item':newKioskItem}
from .bot import slack_MsgToUserAboutNonNormalBankBalance
class GeldTransaktionen(models.Model):
AutoTrans_ID = models.AutoField(primary_key=True)
vonnutzer = models.ForeignKey(
KioskUser, on_delete=models.CASCADE,related_name='nutzerVon')
zunutzer = models.ForeignKey(
KioskUser, on_delete=models.CASCADE,related_name='nutzerZu')
betrag = models.IntegerField(validators=[MinValueValidator(0)])
kommentar = models.TextField(max_length=512,blank=True)
datum = models.DateTimeField(auto_now_add=timezone.now)
def __str__(self):
betr = '%.2f' % (self.betrag/100)
return("[#" +
str(self.AutoTrans_ID) + "] " + str(betr) +
" "+chr(8364)+" von " + str(self.vonnutzer) + " an " +
str(self.zunutzer))
    # Query the total number of transactions
def getLengthOfAllTransactions(user):
allTransactions = readFromDatabase('getLengthOfAllTransactions',[user.id, user.id])
return(allTransactions)
    # Query a selection of a user's transactions for display in the account movements
def getTransactions(user,page,limPP,maxIt):
if int(page)*int(limPP) > int(maxIt):
limPPn = int(limPP) - (int(page)*int(limPP) - int(maxIt))
else:
limPPn = limPP
allTransactions = readFromDatabase('getTransactions',
[user.id, user.id, int(page)*int(limPP), limPPn])
# Add TimeZone information: It is stored as UTC-Time in the SQLite-Database
for k,v in enumerate(allTransactions):
allTransactions[k]['datum'] = pytz.timezone('UTC').localize(v['datum'])
return(allTransactions)
@transaction.atomic
def doTransaction(vonnutzer,zunutzer,betrag,datum, kommentar):
t = GeldTransaktionen(vonnutzer=vonnutzer, zunutzer=zunutzer, betrag = betrag, datum=datum, kommentar=kommentar)
# Bargeld transaction among Bargeld-users are calculated negatively. But not, as soon as one "normal" user is a part of the transaction
if t.vonnutzer.username in ('Bargeld','Bargeld_Dieb','Bargeld_im_Tresor') and t.zunutzer.username in ('Bargeld','Bargeld_Dieb','Bargeld_im_Tresor'):
sign = -1
else:
sign = +1
        # Get the account balance of 'vonNutzer' and apply the new amount
vonNutzerKonto = Kontostand.objects.get(nutzer_id=t.vonnutzer)
vonNutzerKonto.stand = vonNutzerKonto.stand - sign * t.betrag
vonNutzerKonto.save()
        # Get the account balance of 'zuNutzer' and apply the new amount
zuNutzerKonto = Kontostand.objects.get(nutzer_id=t.zunutzer)
zuNutzerKonto.stand = zuNutzerKonto.stand + sign * t.betrag
zuNutzerKonto.save()
t.save()
# Message to the users if their bank balance becomes too high / too low
if getattr(settings,'ACTIVATE_SLACK_INTERACTION') == True:
try:
slack_MsgToUserAboutNonNormalBankBalance(t.vonnutzer.id, vonNutzerKonto.stand)
slack_MsgToUserAboutNonNormalBankBalance(t.zunutzer.id, zuNutzerKonto.stand)
except:
pass
@transaction.atomic
def makeManualTransaktion(form,currentUser):
        # Perform a transfer from the admin area
idFrom = int(form['idFrom'].value())
idTo = int(form['idTo'].value())
betrag = int(100*float(form['betrag'].value()))
kommentar = form['kommentar'].value()
userFrom = KioskUser.objects.get(id=idFrom)
userTo = KioskUser.objects.get(id=idTo)
kommentar = kommentar + ' (' + userFrom.username + ' --> ' + userTo.username + ')'
GeldTransaktionen.doTransaction(vonnutzer=userFrom, zunutzer=userTo,
betrag=betrag, datum=timezone.now(), kommentar=kommentar)
return {'returnDict':{'betrag':betrag/100,'userFrom':userFrom.username,'userTo':userTo.username},
'type':'manTransaction',
'userFrom':userFrom,
'userTo':userTo,
'betrag':betrag/100,
'user':currentUser
}
@transaction.atomic
def makeEinzahlung(form,currentUser):
        # Perform a deposit or withdrawal (the counterpart user is 'Bargeld')
barUser = KioskUser.objects.get(username='Bargeld')
if form['typ'].value() == 'Einzahlung':
idFrom = barUser.id
idTo = int(form['idUser'].value())
ezaz = 'eingezahlt'
else:
idTo = barUser.id
idFrom = int(form['idUser'].value())
ezaz = 'ausgezahlt'
betrag = int(100*float(form['betrag'].value()))
kommentar = form['kommentar'].value()
userFrom = KioskUser.objects.get(id=idFrom)
userTo = KioskUser.objects.get(id=idTo)
kommentar = kommentar + ' (' + form['typ'].value() + ')'
GeldTransaktionen.doTransaction(vonnutzer=userFrom, zunutzer=userTo,
betrag=betrag, datum=timezone.now(), kommentar=kommentar)
return {'type':ezaz,
'userFrom':userFrom,
'userTo':userTo,
'betrag':betrag/100,
'user':currentUser
}
# The account balance actually follows from the GeldTransaktionen records, but as a safeguard (deleted table entries, bugs, etc.) it is stored separately: every transaction is applied to the current balance. No further coupling -> other tables can crash without damaging the balance.
class Kontostand(models.Model):
nutzer = models.OneToOneField(KioskUser, on_delete=models.CASCADE,
primary_key = True)
stand = models.IntegerField()
def __str__(self):
stnd = '%.2f' % (self.stand/100)
return(str(self.nutzer) + ": " + str(stnd) + " "+chr(8364))
# At inventory, here the paid but not taken items are registered
class ZuVielBezahlt(models.Model):
produkt = models.ForeignKey(
Produktpalette,on_delete=models.CASCADE)
datum = models.DateTimeField(auto_now_add=True)
preis = models.IntegerField()
def __str__(self):
preis = '%.2f' % (self.preis/100)
return(self.produkt.produktName + ": " + str(preis) + " "+chr(8364))
@transaction.atomic
def makeInventory(request, currentUser, inventoryList):
report = []
# Go through all items in the kiosk
for item in inventoryList:
# Check, if the item should be considered
if not request.POST.get(item["checkbutton_id_name"]) is None:
# Get the should- and is- count of the item
isVal = int(request.POST.get(item["count_id_name"]))
shouldVal = item["anzahl"]
# Check, if stock is higher, lower or equal
if shouldVal == isVal:
diff = 0
report.append({'id': item["id"],
'produkt_name': item["produkt_name"],
'verkaufspreis_ct': item["verkaufspreis_ct"],
'verlust': False,
'anzahl': diff,
'message': 'OK.'})
elif shouldVal < isVal:
diff = isVal - shouldVal
# Too much has been bought.
                    # First try to book back items that the "Dieb" has "bought"
userDieb = KioskUser.objects.get(username='Dieb')
diebBoughtItems = readFromDatabase('getBoughtItemsOfUser', [userDieb.id])
diebBought = [x for x in diebBoughtItems if x['produkt_id']==item['id']]
if not diebBought==[]:
noToBuyBack = diebBought[0]['anzahl_gekauft']
noToBuyBack = min(noToBuyBack,diff)
Gekauft.rueckbuchenOhneForm(userDieb.id,item['id'],noToBuyBack)
else:
noToBuyBack = 0
diff = diff - noToBuyBack
                    # If booking back is not possible, a new item is created on the open shopping list and pushed to the kiosk. A notice is recorded in the table of over-paid items.
datum = timezone.now()
p = Produktpalette.objects.get(id=item["id"])
maxGroup = EinkaufslisteGroups.objects.all().aggregate(Max('gruppenID'))
maxGroup = maxGroup["gruppenID__max"] + 1
for i in range(0,diff):
e = Einkaufsliste(produktpalette = p)
e.save()
eg = EinkaufslisteGroups(einkaufslistenItem=e,gruppenID=maxGroup)
eg.save()
ok = Einkaufsliste.einkaufGroupVormerken(maxGroup,currentUser.id)
z = ZuVielBezahlt(produkt = p, datum = datum, preis = int(item["verkaufspreis_ct"]))
z.save()
angeliefert = ZumEinkaufVorgemerkt.objects.filter(einkaeufer__id=currentUser.id,
produktpalette__id=item["id"]).order_by('kiosk_ID')[:diff]
                    # Record the values and write them into the kiosk
for an in angeliefert:
k = Kiosk(kiosk_ID=an.kiosk_ID,bedarfErstelltUm=an.bedarfErstelltUm,
produktpalette_id=an.produktpalette_id, einkaufsvermerkUm=an.einkaufsvermerkUm,
einkaeufer_id = an.einkaeufer_id, geliefertUm = datum,
verwalterEinpflegen_id = currentUser.id, einkaufspreis = 0)
k.save()
an.delete()
report.append({'id': item["id"],
'produkt_name': item["produkt_name"],
'verkaufspreis_ct': item["verkaufspreis_ct"],
'verlust': False,
'anzahl': diff+noToBuyBack,
'message': str(diff+noToBuyBack) + ' zu viel gekauft.'})
elif shouldVal > isVal:
                    # Items have not been paid for. Now the "thief" "buys" them.
diff = shouldVal-isVal
user = KioskUser.objects.get(username='Dieb')
buyItem = item["produkt_name"]
for x in range(0,diff):
retVal = Kiosk.buyItem(buyItem,user,gekauft_per='dieb')
report.append({'id': item["id"],
'produkt_name': item["produkt_name"],
'verkaufspreis_ct': item["verkaufspreis_ct"],
'verlust': True,
'anzahl': diff,
'message': str(diff) + ' nicht bezahlt. Nun "kauft" diese der Dieb.'})
return(report)
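
# Illustrative programmatic use of the transaction helper above (user names and
# the amount are placeholders; amounts are integer cents, cf. Kontostand.__str__):
#
#   from django.utils import timezone
#   bank = KioskUser.objects.get(username='Bank')
#   buyer = KioskUser.objects.get(username='some_user')
#   GeldTransaktionen.doTransaction(bank, buyer, 250, timezone.now(), 'Erstattung Einkauf')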
| 37.526536 | 354 | 0.737318 |
d31caf04cb9c133ec4d3c2c4da1c9cc2f19a7a62
| 553 |
py
|
Python
|
module_functions.py
|
dcazabat/SYNOP_PY
|
7a9f1804858d72b1ec2584fed887689161036ad7
|
[
"MIT"
] | null | null | null |
module_functions.py
|
dcazabat/SYNOP_PY
|
7a9f1804858d72b1ec2584fed887689161036ad7
|
[
"MIT"
] | null | null | null |
module_functions.py
|
dcazabat/SYNOP_PY
|
7a9f1804858d72b1ec2584fed887689161036ad7
|
[
"MIT"
] | null | null | null |
import platform
import os
def creation_date(path_to_file):
if platform.system() == 'Windows':
return os.path.getctime(path_to_file)
else:
stat = os.stat(path_to_file)
try:
return stat.st_birthtime
except AttributeError:
# We're probably on Linux. No easy way to get creation dates here,
# so we'll settle for when its content was last modified.
            return stat.st_mtime
def clear():
if os.name == "nt":
os.system("cls")
else:
os.system("clear")
| 27.65 | 78 | 0.60217 |
9f701bc08c1c4dfe533065e4d0ee3002ac64f361
| 1,661 |
py
|
Python
|
c++basic/DATA_STRUCTURES/prime_mult.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 16 |
2018-11-26T08:39:42.000Z
|
2019-05-08T10:09:52.000Z
|
c++basic/DATA_STRUCTURES/prime_mult.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 8 |
2020-05-04T06:29:26.000Z
|
2022-02-12T05:33:16.000Z
|
c++basic/DATA_STRUCTURES/prime_mult.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 5 |
2020-02-11T16:02:21.000Z
|
2021-02-05T07:48:30.000Z
|
from multiprocessing.pool import ThreadPool
import datetime
import numpy as np
import math
def __is_divisible__(a,b):
if a%b == 0:
return 1
return 0
def isPrime_n(a):
#is prime noob
k=0
for i in range(2,a-1):
if __is_divisible__(a,i) == 1:
k=1
break
if k==0:
return 1
else:
return 0
def isPrime_g(a):
#is prime good
k=0
for i in range(2,int(a/2)+1):
if __is_divisible__(a,i) == 1:
k=1
break
if k==0:
return 1
else:
return 0
def isPrime_b(a):
# is prime best
k=0
for i in range(2,int(math.sqrt(a)+1)):
if __is_divisible__(a,i) == 1:
k=1
break
if k==0:
return 1
else:
return 0
IS_PRIME = True
class mult_prime:
global IS_PRIME
def __is_divisible__(self,a,b):
if a%b == 0:
IS_PRIME = False
def method_prime(method_tr):
et = datetime.datetime.now()
for i in np.arange(100):
k = method_tr(i) and i
if k > 0:
print(k,end=" ")
print("\nMicro-Seconds : ",(et-datetime.datetime.now()).microseconds)
def multi_prime(a):
arr = np.arange(2,int(math.sqrt(a)+1))
#print(arr)
ThreadPool(30).imap_unordered(mult_prime.__is_divisible__,a,arr)
global IS_PRIME
if IS_PRIME == True:
print(a)
if __name__ == "__main__":
#print(__is_divisible__(5,2))
print("noob:",end="")
method_prime(isPrime_n)
print("good:",end="")
method_prime(isPrime_g)
print("best:",end="")
method_prime(isPrime_b)
print()
#multi_prime(10)
pass
| 20.506173 | 73 | 0.5587 |
e290dde6888c2655675fa623360f1477b47adc7f
| 5,415 |
py
|
Python
|
app/freelancer/tests/test_profile.py
|
mshirzad/find-my-job
|
7dca88d6233649952f0b948156a91af5b96352ff
|
[
"MIT"
] | null | null | null |
app/freelancer/tests/test_profile.py
|
mshirzad/find-my-job
|
7dca88d6233649952f0b948156a91af5b96352ff
|
[
"MIT"
] | null | null | null |
app/freelancer/tests/test_profile.py
|
mshirzad/find-my-job
|
7dca88d6233649952f0b948156a91af5b96352ff
|
[
"MIT"
] | 1 |
2022-03-06T17:44:49.000Z
|
2022-03-06T17:44:49.000Z
|
import os, tempfile
from PIL import Image
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from rest_framework import test, status
from rest_framework.test import APIClient
from core.models import Profile, Address, Gig, Education
from freelancer.serializers import ProfileSerializer
# MY_PROFILE_URL = reverse('freelancer:myProfile-list')
# ALL_PROFILES_URL = reverse('freelancer:profile-list')
def upload_profile_photo_url(profile_id):
return reverse('freelancer:myprofile-uploade-profile-photo', args=[profile_id])
def profile_details_url(profile_id):
return reverse('freelancer:myprofile-details', args=[profile_id])
def create_sample_address(**params):
defaults = {
'address_line1': 'Apt 102, St 33 NW',
'city': 'LA',
'province': 'CA',
'post_code': '33AW23',
'country': 'USA'
}
defaults.update(params)
return Address.objects.create(**defaults)
def create_sample_edu(**params):
defaults = {
'degree': 'Master',
'university': 'MIT',
'faculty': 'CS',
'start_year': 2018,
'graduation_year': 2020
}
defaults.update(params)
return Education.objects.create(**defaults)
def create_sample_profile(user, **params):
defaults = {
'phone': '+93778898899',
'profession': 'Eng',
'boi': 'Test Boi',
'address': create_sample_address(),
'education': create_sample_edu()
}
defaults.update(params)
return Profile.objects.create(user=user, **defaults)
def create_sample_gig(freelancer, **params):
defaults = {
'title': 'New Gig for Web App',
'description': 'Some Lorem ipsom',
'min_price': 40.00
}
defaults.update(params)
return Gig.objects.create(freelancer=freelancer, **defaults)
# class TestPublicProfileAPI(TestCase):
# def setUp(self):
# self.client = APIClient()
# def test_auth_required(self):
# resp = self.client.get(ALL_PROFILES_URL)
# self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
# class TestPrivateProfileAPI(TestCase):
# def setUp(self):
# self.client = APIClient()
# self.user = get_user_model().objects.create_user(
# email='[email protected]',
# password='test@12345'
# )
# self.user.name = 'Test User'
# self.client.force_authenticate(self.user)
# def test_show_freelancer_profile_to_other_users(self):
# user2 = get_user_model().objects.create_user(
# '[email protected]',
# 'test@1234555'
# )
# user2.name = 'Test USER'
# user3 = get_user_model().objects.create_user(
# '[email protected]',
# 'test@1234555'
# )
# user3.name = 'Test USER3'
# create_sample_profile(user=user2)
# create_sample_profile(user=user3)
# resp = self.client.get(ALL_PROFILES_URL)
# profiles = Profile.objects.all().order_by('-rating')
# serializer = ProfileSerializer(profiles, many=True)
# self.assertEqual(resp.status_code, status.HTTP_200_OK)
# self.assertEqual(resp.data, serializer.data)
# def test_show_profile_to_its_own_user(self):
# user2 = get_user_model().objects.create_user(
# '[email protected]',
# 'test@1234555'
# )
# user2.name = 'Test USER2'
# create_sample_profile(user=user2)
# create_sample_profile(user=self.user)
# resp = self.client.get(MY_PROFILE_URL)
# profile = Profile.objects.filter(user=self.user)
# serializer = ProfileSerializer(profile, many=True)
# self.assertEqual(resp.status_code, status.HTTP_200_OK)
# self.assertEqual(len(resp.data), 1)
# print(resp.data)
# print("#########")
# print(serializer.data)
# self.assertEqual(resp.data, serializer.data)
# class TestUploadProfilePhotoAPI(TestCase):
# def setUp(self):
# self.client = APIClient()
# self.user = get_user_model().objects.create_user(
# email='[email protected]',
# password='test@12345'
# )
# self.user.name = 'Test User'
# self.client.force_authenticate(self.user)
# self.profile = create_sample_profile(user= self.user)
# def tearDown(self):
# self.profile.profile_photo.delete()
# def test_upload_profile_photo(self):
# url = upload_profile_photo_url(profile_id=self.profile.id)
# with tempfile.NamedTemporaryFile(suffix='.jpg') as nft:
# img = Image.new('RGB', (10,10))
# img.save(nft, format='JPEG')
# nft.seek(0)
#             resp = self.client.post(url, {'profile_photo': nft}, format='multipart')
#         self.profile.refresh_from_db()
# self.assertEqual(resp.status_code, status.HTTP_200_OK)
# self.assertIn('profile_photo', resp.data)
# self.assertTrue(os.path.exists(self.profile.profile_photo.path))
# def test_upload_profile_photo_bad_image(self):
# url = upload_profile_photo_url(profile_id=self.profile.id)
#         resp = self.client.post(url, {'profile_photo': 'noImage'}, format='multipart')
# self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
| 28.650794 | 88 | 0.635272 |
e2aefdb5d4c4918146034a0834daf3d3d9bd181b
| 1,173 |
py
|
Python
|
website/python/app.py
|
man-r/DimensionsLab
|
c94c3aec0d52326ad522a6fa41d43ec3bde87d74
|
[
"MIT"
] | null | null | null |
website/python/app.py
|
man-r/DimensionsLab
|
c94c3aec0d52326ad522a6fa41d43ec3bde87d74
|
[
"MIT"
] | 1 |
2022-03-24T06:13:52.000Z
|
2022-03-24T06:13:52.000Z
|
website/python/app.py
|
man-r/DimensionsLab
|
c94c3aec0d52326ad522a6fa41d43ec3bde87d74
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask_restful import Api, Resource, reqparse
app = Flask(__name__)
api = Api(app)
users = [
{
"name": "Nicholas",
"age": 42,
"occupation": "Network Engineer"
},
{
"name": "Elvin",
"age": 32,
"occupation": "Doctor"
},
{
"name": "Jass",
"age": 22,
"occupation": "Web Developer"
}
]
class User(Resource):
def get(self, name):
for user in users:
if (name == user["name"]):
return user, 200
return "User not found", 404
def post(self, name):
parser = reqparse.RequestParser()
parser.add_argument("age")
parser.add_argument("occupation")
args = parser.parse_args()
for user in users:
if (name == user["name"]):
return "User with the name {} already exist".format(name), 400
user = {
"name": name,
"age": args["age"],
"occupation": args["occupation"]
}
users.append(user)
return user, 201
def delete(self, name):
global users
users = [user for user in users if user["name"] != name]
return "{} is deleted.".format(name), 200
api.add_resource(User, "/user/<string:name>")
app.run(debug=True)
| 19.881356 | 66 | 0.58994 |
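A minimal usage sketch for the User resource above, assuming the app is running locally on Flask's default port; the host, port and example payloads here are illustrative, not part of the original file:

import requests

BASE = "http://127.0.0.1:5000"  # assumed default Flask host/port

# create a new user (POST /user/<name>); age/occupation are read from form data by reqparse
resp = requests.post(f"{BASE}/user/Alice", data={"age": 30, "occupation": "Engineer"})
print(resp.status_code, resp.json())  # 201 and the created record

# fetch an existing user (GET /user/<name>)
resp = requests.get(f"{BASE}/user/Nicholas")
print(resp.status_code, resp.json())  # 200 and the stored record

# delete a user (DELETE /user/<name>)
resp = requests.delete(f"{BASE}/user/Elvin")
print(resp.status_code, resp.json())  # 200 and a confirmation message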
394e6287c45dd4b169b78862df54b129511b5346
| 1,495 |
py
|
Python
|
test_pyparsing_3_1.py
|
luluci/gui_env
|
9c2ffe331c2dc8a7e128474ce9590498082de569
|
[
"MIT"
] | null | null | null |
test_pyparsing_3_1.py
|
luluci/gui_env
|
9c2ffe331c2dc8a7e128474ce9590498082de569
|
[
"MIT"
] | null | null | null |
test_pyparsing_3_1.py
|
luluci/gui_env
|
9c2ffe331c2dc8a7e128474ce9590498082de569
|
[
"MIT"
] | null | null | null |
import pyparsing as pp
def act_comment(token):
print("comment: " + str(token))
def act_keyword(token):
print("keyword: " + str(token))
def act_sc(token):
print("semicolon: " + str(token))
def act_parser_start(token):
print("parser_start: " + str(token))
def act_parser_end(token):
print("parser_end: " + str(token))
comment_parser = pp.Group(
(pp.Literal("//") + pp.restOfLine)
| pp.cStyleComment
).setParseAction(act_comment)
pp_key1 = pp.Keyword("hoge")
pp_key2 = pp.Keyword("fuga")
pp_sc = pp.Literal(";")
statement = pp.Group(
pp.Empty().setParseAction(act_parser_start)
+ pp_key1.setParseAction(act_keyword)
+ pp_key2.setParseAction(act_keyword)
+ pp_sc.setParseAction(act_sc)
+ pp.Empty().setParseAction(act_parser_end)
)
parser = statement[1, ...].ignore(comment_parser)
test_text = """\
hoge fuga; // comment1
hoge /* comment2-1 */ fuga; /* comment2-2 */
// comment3
hoge fuga; // comment4
"""
ret = parser.parseString(test_text)
print(ret)
"""
[result]
parser_start: []
keyword: ['hoge']
keyword: ['fuga']
semicolon: [';']
comment: [['//', ' comment1']]
parser_end: []
parser_start: []
keyword: ['hoge']
comment: [['/* comment2-1 */']]
keyword: ['fuga']
semicolon: [';']
comment: [['/* comment2-2 */']]
comment: [['//', ' comment3']]
parser_end: []
parser_start: []
keyword: ['hoge']
keyword: ['fuga']
semicolon: [';']
comment: [['//', ' comment4']]
parser_end: []
parser_start: []
[['hoge', 'fuga', ';'], ['hoge', 'fuga', ';'], ['hoge', 'fuga', ';']]
"""
| 19.166667 | 69 | 0.646154 |
200a07a8f6c323cf28ccdef7bc5e7ac20a331280
| 206 |
py
|
Python
|
src/onegov/winterthur/forms/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/winterthur/forms/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/winterthur/forms/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from onegov.winterthur.forms.mission_report import MissionReportForm
from onegov.winterthur.forms.mission_report import MissionReportVehicleForm
__all__ = ('MissionReportForm', 'MissionReportVehicleForm')
| 41.2 | 75 | 0.868932 |
6490cc9b76431e255aeab4722b02c97b8014ad01
| 5,628 |
py
|
Python
|
src/flocker/blueprints/mvg-frame/__init__.py
|
Muxelmann/home-projects
|
85bd06873174b9c5c6276160988c19b460370db8
|
[
"MIT"
] | null | null | null |
src/flocker/blueprints/mvg-frame/__init__.py
|
Muxelmann/home-projects
|
85bd06873174b9c5c6276160988c19b460370db8
|
[
"MIT"
] | null | null | null |
src/flocker/blueprints/mvg-frame/__init__.py
|
Muxelmann/home-projects
|
85bd06873174b9c5c6276160988c19b460370db8
|
[
"MIT"
] | null | null | null |
import os
import time
from flask import Blueprint, render_template, redirect, url_for, request, current_app
from . import mvg
from . import displays
from PIL import Image, ImageFont, ImageDraw
def create_bp(app):
bp_mvg = Blueprint('mvg-frame', __name__, url_prefix='/mvg-frame')
displays.init(app)
@bp_mvg.route('/index/')
@bp_mvg.route('/')
def index():
return render_template('mvg-frame/index.html.j2', data=displays.data())
@bp_mvg.route("/updateData/<string:mac>", methods={'GET', 'POST'})
def update_data(mac):
data = {}
# Check if a specific station ID has been passed
if 'station_id' in request.args:
station_id = request.args.get('station_id')
station_name = mvg.get_name_for_id(station_id)
# Only set the data if ID is valid, i.e. returns a valid station name
if station_name is not None:
data['station_id'] = station_id
data['station_name'] = station_name
# Populate data with form inputs
for key, value in request.form.items():
if key in ['station_name']:
data[key] = value
# vv Makes sure that the old station ID is not accidentally kept
data['station_id'] = None
if key in ['offset_top', 'offset_bottom', 'offset_left', 'offset_right'] and value.isnumeric():
data[key] = int(value)
        # Update the stored data
displays.update(mac, data)
# Check if a station ID has already been passed / set
if data['station_id'] is None:
# Find all station IDs for the station name
station_ids = mvg.get_ids_for_satation(data['station_name'])
# If not exactly one station was found...
if len(station_ids) == 1:
# ... save the found station ID
for key, value in station_ids.items():
displays.update(mac, {'station_id': key})
elif len(station_ids) > 1:
# ... or let the user choose and pass (via GET) a station ID
return render_template('mvg-frame/index.html.j2', mac=mac, station_ids=station_ids)
return redirect(url_for('mvg-frame.index'))
# Functions called from frame
@bp_mvg.route('/update/<string:mac>')
def update(mac):
# Make a new empty image in the size of the screen
img_path = os.path.join(current_app.instance_path, 'mvg-{}.png'.format(mac.replace(':', '')))
(w, h) = displays.size_for(mac)
img = Image.new('RGB', (w, h), (0, 0, 0))
draw = ImageDraw.Draw(img)
font_dir = os.path.join('/'.join(os.path.abspath(__file__).split('/')[0:-1]), 'static')
font_normal = ImageFont.truetype(os.path.join(font_dir, 'EXCITE.otf'), 42)
font_bold = ImageFont.truetype(os.path.join(font_dir, 'EXCITE_B.otf'), 42)
station_id, _ = displays.station_for(mac)
if station_id is None:
draw.text((w/2, h/2), "STATION ERROR", fill=(255, 255, 255), font=font_bold, anchor='mm')
img.save(img_path, 'PNG')
return "0"
(o_t, o_b, o_l, o_r) = displays.offset_for(mac)
draw.polygon([
o_l, o_t,
o_l, h-o_b,
w-o_r, h-o_b,
w-o_r, o_t,
], fill=(255, 255, 255))
# Get the departures for the station ID
departures = mvg.get_departures_for_id(station_id, limit=7)
if len(departures) == 0:
draw.text((w/2, h/2), "NO DATA", fill=(0, 0, 0), font=font_bold, anchor='mm')
img.save(img_path, 'PNG')
return "0"
# departure_times = "\n".join([time.strftime('%H:%M', d['departure']) for d in departures])
departure_minutes = "\n".join(["{:.0f}".format((time.mktime(d['departure'])-time.time())/60) for d in departures])
departure_service = "\n".join(["{} {}".format(d['service'], d['destination']) for d in departures])
draw.multiline_text((o_l + 10, o_t+5), departure_minutes, font=font_bold, fill=(0, 0, 0))
draw.multiline_text((o_l + 100, o_t+5), departure_service, font=font_normal, fill=(0, 0, 0))
img.save(img_path, 'PNG')
return "1"
@bp_mvg.route('/imageData/<string:mac>') # GET: segCount & seg
def image_data(mac):
seg_count = int(request.args.get('segCount', default="1"))
seg = int(request.args.get('seg', default="0"))
img_path = os.path.join(current_app.instance_path, 'mvg-{}.png'.format(mac.replace(':', '')))
img = Image.open(img_path)
(w, h) = img.size
img = img.rotate(180)
crop_box = (0, seg*h/seg_count, w, (seg+1)*h/seg_count)
img = img.crop(crop_box)
(w, h) = img.size
data = ''
pixels = img.load()
for y in range(h):
for x in range(0, w, 4):
black = [all([pixel == 0 for pixel in pixels[x+px, y]]) for px in range(4)]
white = [all([pixel == 255 for pixel in pixels[x+px, y]]) for px in range(4)]
new_data = ''
for z in range(4):
if white[z]:
new_data += '11'
elif black[z]:
new_data += '00'
else:
new_data += '01'
data += '{:02x}'.format(int(new_data, base=2))
return data
@bp_mvg.route('/delayTime/<string:mac>')
def delay_time(mac):
return "30000"
return bp_mvg
| 39.083333 | 122 | 0.555792 |
b3dac29fab9b30a15f3567e16ce2def62510c239
| 5,959 |
py
|
Python
|
src/main/apps/mlops/utils/model_loader.py
|
Nouvellie/django-tflite
|
1d08fdc8a2ec58886d7d2b8d40e7b3598613caca
|
[
"MIT"
] | 2 |
2021-08-23T21:56:07.000Z
|
2022-01-20T13:52:19.000Z
|
src/main/apps/mlops/utils/model_loader.py
|
Nouvellie/django-tflite
|
1d08fdc8a2ec58886d7d2b8d40e7b3598613caca
|
[
"MIT"
] | null | null | null |
src/main/apps/mlops/utils/model_loader.py
|
Nouvellie/django-tflite
|
1d08fdc8a2ec58886d7d2b8d40e7b3598613caca
|
[
"MIT"
] | null | null | null |
import numpy as np
import os
from .file_loader import (
CatsvsdogsFileLoader,
FashionMnistFileLoader,
ImdbSentimentFileLoader,
StackoverflowFileLoader,
)
from .model_input import ModelInputGenerator
from .output_decoder import OutputDecoder
from .pipeline import Pipeline
from .preprocessing import pipeline_function_register
from abc import (
ABC,
abstractmethod,
)
from main.settings import (
DEBUG,
MODEL_ROOT,
)
from tensorflow import (
convert_to_tensor,
lite,
)
from tensorflow.keras.models import model_from_json
from typing import (
Generic,
TypeVar,
)
SELFCLASS = TypeVar('SELFCLASS')
class BaseModelLoader(ABC):
"""Metaclass for defining the model loader."""
def __new__(cls, model_dir: str, *args, **kwargs) -> Generic[SELFCLASS]:
return super(BaseModelLoader, cls).__new__(cls, *args, **kwargs)
def __init__(self, model_dir: str) -> None:
self.model_type = int(model_dir.split("/")[0])
self.model_dir = model_dir
self.model_preload()
self.preprocessing_load()
self.postprocessing_load()
self.model_input_load()
self.preload_file_loader()
def preprocessing_load(self) -> None:
"""Function to apply preprocessing to an array."""
preprocessing_path = os.path.join(MODEL_ROOT + f"{self.model_dir}/preprocessing.json")
self.preprocessing = Pipeline()
self.preprocessing.from_json(preprocessing_path)
def postprocessing_load(self) -> None:
"""Function to apply postprocessing to model output."""
postprocessing_path = os.path.join(MODEL_ROOT + f"{self.model_dir}/postprocessing.json")
self.postprocessing = OutputDecoder()
self.postprocessing.from_json(postprocessing_path)
def model_input_load(self) -> None:
"""Creates a generic modelinput."""
self.ModelInput = ModelInputGenerator()
def preload_file_loader(self) -> None:
"""Function to load the file as an array."""
if self.model_type == 1:
self.file_loader = FashionMnistFileLoader()
elif self.model_type == 2:
self.file_loader = ImdbSentimentFileLoader()
elif self.model_type == 3:
self.file_loader = StackoverflowFileLoader()
elif self.model_type == 4:
self.file_loader = CatsvsdogsFileLoader()
else:
pass
def generate_model_input(self, model_input: any) -> list:
"""From file -> array -> preprocessing -> model input."""
model_input = self.file_loader(model_input)
model_input = self.preprocessing(model_input)
model_input = self.ModelInput.model_input_generator(model_input)
return model_input
@abstractmethod
def model_preload(self) -> None:
"""This function is used to generate the preload of the model."""
pass
@abstractmethod
def predict(self, model_input: any, confidence: bool) -> dict:
"""With this function the inference of the model is generated."""
pass
class TFLiteModelLoader(BaseModelLoader):
"""Class to generate predictions from a TFLite model."""
NUM_THREADS = 4
def model_preload(self) -> None:
tflite_name = [name for name in os.listdir(MODEL_ROOT + f"{self.model_dir}") if name.endswith(".tflite")][0]
model_path = os.path.join(MODEL_ROOT + f"{self.model_dir}/{tflite_name}")
if self.NUM_THREADS > 0:
self.interpreter = lite.Interpreter(
model_path=str(model_path), num_threads=self.NUM_THREADS)
else:
self.interpreter = lite.Interpreter(model_path=str(model_path))
self.interpreter.allocate_tensors()
self.input_details = self.interpreter.get_input_details()
self.output_details = self.interpreter.get_output_details()
# print(f"The model {self.model_dir.title()} has been successfully pre-loaded. (TFLITE)")
def predict(self, model_input: any, confidence: bool = False) -> dict:
model_input = self.generate_model_input(model_input)
if self.model_type in (1, 4):
for i, j in enumerate(model_input):
model_input_tensor = convert_to_tensor(
np.array(j), np.float32)
self.interpreter.set_tensor(
self.input_details[i]['index'], model_input_tensor)
elif self.model_type in (2, 3):
for i, j in enumerate(model_input):
self.interpreter.set_tensor(
self.input_details[i]['index'], j)
self.interpreter.invoke()
prediction = self.interpreter.get_tensor(
self.output_details[0]['index'])
result = self.postprocessing.output_decoding(
model_output=prediction, confidence=confidence)
return result
class HDF5JSONModelLoader(BaseModelLoader):
"""Class to generate predictions from a HDF5JSON model."""
def model_preload(self) -> None:
hdf5_path = os.path.join(MODEL_ROOT + f"{self.model_dir}/model.hdf5")
json_path = os.path.join(MODEL_ROOT + f"{self.model_dir}/model.json")
with open(json_path, "r") as jp:
self.model = model_from_json(jp.read())
self.model.load_weights(hdf5_path)
# print(f"The model {self.model_dir.title()} has been successfully pre-loaded. (HDF5-JSON)")
def predict(self, model_input: any, confidence: bool = False) -> dict:
model_input = self.generate_model_input(model_input)
prediction = self.model.predict(model_input)
result = self.postprocessing.output_decoding(
model_output=prediction, confidence=confidence)
return result
class CheckpointModelLoader(BaseModelLoader):
"""Class to generate predictions from a Checkpoint model."""
def model_preload(self) -> None:
pass
def predict(self, model_input: any, confidence: bool) -> dict:
pass
| 35.260355 | 116 | 0.664205 |
37369717bf4744bf752a9e5e1db557ed280d3c7a
| 496 |
py
|
Python
|
Blatt-02/Sonstiges/tribonacci.py
|
MartinThoma/prog-ws1213
|
c82a2fb81bac774f8d3214a25c33124a9f512ef0
|
[
"MIT"
] | 1 |
2017-08-10T13:12:03.000Z
|
2017-08-10T13:12:03.000Z
|
Blatt-02/Sonstiges/tribonacci.py
|
siviaseason/prog-ws1213
|
c82a2fb81bac774f8d3214a25c33124a9f512ef0
|
[
"MIT"
] | null | null | null |
Blatt-02/Sonstiges/tribonacci.py
|
siviaseason/prog-ws1213
|
c82a2fb81bac774f8d3214a25c33124a9f512ef0
|
[
"MIT"
] | 2 |
2016-06-08T20:56:04.000Z
|
2022-03-11T20:12:37.000Z
|
def tribonacci(n):
if n < 3:
return n
else:
return tribonacci(n-1) + tribonacci(n-2) + tribonacci(n-3)
def tribonacciBottomUp(n):
last = 1
secondLast = 1
thirdLast = 1
for i in range(2,n):
new = last + secondLast + thirdLast
thirdLast = secondLast
secondLast = last
last = new
return last
def fillIt(n):
    for i in range(0,40+1):
print("<tr><td>%i</td><td>%i</td></tr>" % (i, tribonacciBottomUp(i)))
| 21.565217 | 73 | 0.568548 |
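A quick check of the recursive variant above (exponential time, fine only for small n); note that the bottom-up variant uses different base cases (three ones), so its values are not expected to match:

print([tribonacci(n) for n in range(10)])
# -> [0, 1, 2, 3, 6, 11, 20, 37, 68, 125]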
03fcad1d1936cee28640a02b1c3ebdfc5c4cd278
| 53 |
py
|
Python
|
python_lessons/MtMk_Test_Files/numpy_test.py
|
1986MMartin/coding-sections-markus
|
e13be32e5d83e69250ecfb3c76a04ee48a320607
|
[
"Apache-2.0"
] | null | null | null |
python_lessons/MtMk_Test_Files/numpy_test.py
|
1986MMartin/coding-sections-markus
|
e13be32e5d83e69250ecfb3c76a04ee48a320607
|
[
"Apache-2.0"
] | null | null | null |
python_lessons/MtMk_Test_Files/numpy_test.py
|
1986MMartin/coding-sections-markus
|
e13be32e5d83e69250ecfb3c76a04ee48a320607
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
arr = np.arange(0,11)
print(arr)
| 17.666667 | 22 | 0.698113 |
45a6eb2cafb85abd876f4684bf56dbe0066463d9
| 16,694 |
py
|
Python
|
research/cv/eppmvsnet/src/networks.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/eppmvsnet/src/networks.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/eppmvsnet/src/networks.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""sub-networks of EPP-MVSNet"""
import numpy as np
import mindspore
import mindspore.ops as P
from mindspore import nn
from mindspore import Tensor, Parameter
from src.modules import depth_regression, soft_argmin, entropy
class BasicBlockA(nn.Cell):
"""BasicBlockA"""
def __init__(self, in_channels, out_channels, stride):
super(BasicBlockA, self).__init__()
self.conv2d_0 = nn.Conv2d(in_channels, out_channels, 3, stride=stride, padding=1, pad_mode="pad")
self.conv2d_1 = nn.Conv2d(in_channels, out_channels, 1, stride=stride, padding=0, pad_mode="valid")
self.batchnorm2d_2 = nn.BatchNorm2d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.batchnorm2d_3 = nn.BatchNorm2d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_4 = nn.ReLU()
self.conv2d_5 = nn.Conv2d(out_channels, out_channels, 3, stride=1, padding=(1, 1, 1, 1), pad_mode="pad")
self.batchnorm2d_6 = nn.BatchNorm2d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_8 = nn.ReLU()
def construct(self, x):
"""construct"""
x1 = self.conv2d_0(x)
x1 = self.batchnorm2d_2(x1)
x1 = self.relu_4(x1)
x1 = self.conv2d_5(x1)
x1 = self.batchnorm2d_6(x1)
res = self.conv2d_1(x)
res = self.batchnorm2d_3(res)
out = P.Add()(x1, res)
out = self.relu_8(out)
return out
class BasicBlockB(nn.Cell):
"""BasicBlockB"""
def __init__(self, in_channels, out_channels):
super(BasicBlockB, self).__init__()
self.conv2d_0 = nn.Conv2d(in_channels, out_channels, 3, stride=1, padding=1, pad_mode="pad")
self.batchnorm2d_1 = nn.BatchNorm2d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_2 = nn.ReLU()
self.conv2d_3 = nn.Conv2d(in_channels, out_channels, 3, stride=1, padding=1, pad_mode="pad")
self.batchnorm2d_4 = nn.BatchNorm2d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_6 = nn.ReLU()
def construct(self, x):
"""construct"""
x1 = self.conv2d_0(x)
x1 = self.batchnorm2d_1(x1)
x1 = self.relu_2(x1)
x1 = self.conv2d_3(x1)
x1 = self.batchnorm2d_4(x1)
res = x
out = P.Add()(x1, res)
out = self.relu_6(out)
return out
class UNet2D(nn.Cell):
"""UNet2D"""
def __init__(self):
super(UNet2D, self).__init__()
self.conv2d_0 = nn.Conv2d(3, 16, 5, stride=2, padding=2, pad_mode="pad")
self.batchnorm2d_1 = nn.BatchNorm2d(16, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.leakyrelu_2 = nn.LeakyReLU(alpha=0.009999999776482582)
self.convblocka_0 = BasicBlockA(16, 32, 1)
self.convblockb_0 = BasicBlockB(32, 32)
self.convblocka_1 = BasicBlockA(32, 64, 2)
self.convblockb_1 = BasicBlockB(64, 64)
self.convblocka_2 = BasicBlockA(64, 128, 2)
self.convblockb_2 = BasicBlockB(128, 128)
self.conv2dbackpropinput_51 = P.Conv2DBackpropInput(64, 3, stride=2, pad=1, pad_mode="pad")
self.conv2dbackpropinput_51_weight = Parameter(Tensor(
np.random.uniform(0, 1, (128, 64, 3, 3)).astype(np.float32)))
self.conv2d_54 = nn.Conv2d(128, 64, 3, stride=1, padding=1, pad_mode="pad")
self.convblockb_3 = BasicBlockB(64, 64)
self.conv2dbackpropinput_62 = P.Conv2DBackpropInput(32, 3, stride=2, pad=1, pad_mode="pad")
self.conv2dbackpropinput_62_weight = Parameter(Tensor(
np.random.uniform(0, 1, (64, 32, 3, 3)).astype(np.float32)))
self.conv2d_65 = nn.Conv2d(64, 32, 3, stride=1, padding=1, pad_mode="pad")
self.convblockb_4 = BasicBlockB(32, 32)
self.conv2d_52 = nn.Conv2d(128, 32, 3, stride=1, padding=1, pad_mode="pad")
self.conv2d_63 = nn.Conv2d(64, 32, 3, stride=1, padding=1, pad_mode="pad")
self.conv2d_73 = nn.Conv2d(32, 32, 3, stride=1, padding=1, pad_mode="pad")
self.concat = P.Concat(axis=1)
param_dict = mindspore.load_checkpoint("./ckpts/feat_ext.ckpt")
params_not_loaded = mindspore.load_param_into_net(self, param_dict, strict_load=True)
print(params_not_loaded)
def construct(self, imgs):
"""construct"""
_, _, h, w = imgs.shape
x = self.conv2d_0(imgs)
x = self.batchnorm2d_1(x)
x = self.leakyrelu_2(x)
x1 = self.convblocka_0(x)
x1 = self.convblockb_0(x1)
x2 = self.convblocka_1(x1)
x2 = self.convblockb_1(x2)
x3 = self.convblocka_2(x2)
x3 = self.convblockb_2(x3)
x2_upsample = self.conv2dbackpropinput_51(x3, self.conv2dbackpropinput_51_weight,
(x2.shape[0], x2.shape[1], h // 4, w // 4))
x2_upsample = self.concat((x2_upsample, x2,))
x2_upsample = self.conv2d_54(x2_upsample)
x2_upsample = self.convblockb_3(x2_upsample)
x1_upsample = self.conv2dbackpropinput_62(x2_upsample, self.conv2dbackpropinput_62_weight,
(x1.shape[0], x1.shape[1], h // 2, w // 2))
x1_upsample = self.concat((x1_upsample, x1,))
x1_upsample = self.conv2d_65(x1_upsample)
x1_upsample = self.convblockb_4(x1_upsample)
x3_final = self.conv2d_52(x3)
x2_final = self.conv2d_63(x2_upsample)
x1_final = self.conv2d_73(x1_upsample)
return x3_final, x2_final, x1_final
class ConvBnReLu(nn.Cell):
"""ConvBnReLu"""
def __init__(self, in_channels, out_channels):
super(ConvBnReLu, self).__init__()
self.conv3d_0 = nn.Conv3d(in_channels, out_channels, (3, 1, 1), stride=1, padding=(1, 1, 0, 0, 0, 0),
pad_mode="pad")
self.batchnorm3d_1 = nn.BatchNorm3d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.leakyrelu_2 = nn.LeakyReLU(alpha=0.009999999776482582)
def construct(self, x):
"""construct"""
x = self.conv3d_0(x)
x = self.batchnorm3d_1(x)
x = self.leakyrelu_2(x)
return x
class CostCompression(nn.Cell):
"""CostCompression"""
def __init__(self):
super(CostCompression, self).__init__()
self.basicblock_0 = ConvBnReLu(8, 64)
self.basicblock_1 = ConvBnReLu(64, 64)
self.basicblock_2 = ConvBnReLu(64, 8)
param_dict = mindspore.load_checkpoint("./ckpts/stage1_cost_compression.ckpt")
params_not_loaded = mindspore.load_param_into_net(self, param_dict, strict_load=True)
print(params_not_loaded)
def construct(self, x):
"""construct"""
x = self.basicblock_0(x)
x = self.basicblock_1(x)
x = self.basicblock_2(x)
return x
class Pseudo3DBlock_A(nn.Cell):
"""Pseudo3DBlock_A"""
def __init__(self, in_channels, out_channels):
super(Pseudo3DBlock_A, self).__init__()
self.conv3d_0 = nn.Conv3d(in_channels, out_channels, (1, 3, 3), stride=1, padding=(0, 0, 1, 1, 1, 1),
pad_mode="pad")
self.conv3d_1 = nn.Conv3d(out_channels, out_channels, (3, 1, 1), stride=1, padding=(1, 1, 0, 0, 0, 0),
pad_mode="pad")
self.batchnorm3d_2 = nn.BatchNorm3d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_3 = nn.ReLU()
self.conv3d_4 = nn.Conv3d(out_channels, out_channels, (1, 3, 3), stride=1, padding=(0, 0, 1, 1, 1, 1),
pad_mode="pad")
self.conv3d_5 = nn.Conv3d(out_channels, out_channels, (3, 1, 1), stride=1, padding=(1, 1, 0, 0, 0, 0),
pad_mode="pad")
self.batchnorm3d_6 = nn.BatchNorm3d(out_channels, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_8 = nn.ReLU()
def construct(self, x):
"""construct"""
x1 = self.conv3d_0(x)
x1 = self.conv3d_1(x1)
x1 = self.batchnorm3d_2(x1)
x1 = self.relu_3(x1)
x1 = self.conv3d_4(x1)
x1 = self.conv3d_5(x1)
x1 = self.batchnorm3d_6(x1)
res = x
out = P.Add()(x1, res)
out = self.relu_8(out)
return out
class Pseudo3DBlock_B(nn.Cell):
"""Pseudo3DBlock_B"""
def __init__(self):
super(Pseudo3DBlock_B, self).__init__()
self.conv3d_0 = nn.Conv3d(8, 8, (1, 3, 3), stride=(1, 2, 2), padding=(0, 0, 1, 1, 1, 1), pad_mode="pad")
self.conv3d_1 = nn.Conv3d(8, 16, (1, 1, 1), stride=2, padding=0, pad_mode="valid")
self.conv3d_2 = nn.Conv3d(8, 16, (3, 1, 1), stride=(2, 1, 1), padding=(1, 1, 0, 0, 0, 0), pad_mode="pad")
self.batchnorm3d_3 = nn.BatchNorm3d(16, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.batchnorm3d_4 = nn.BatchNorm3d(16, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_5 = nn.ReLU()
self.conv3d_6 = nn.Conv3d(16, 16, (1, 3, 3), stride=1, padding=(0, 0, 1, 1, 1, 1), pad_mode="pad")
self.conv3d_7 = nn.Conv3d(16, 16, (3, 1, 1), stride=1, padding=(1, 1, 0, 0, 0, 0), pad_mode="pad")
self.batchnorm3d_8 = nn.BatchNorm3d(16, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.relu_10 = nn.ReLU()
def construct(self, x):
"""construct"""
x1 = self.conv3d_0(x)
x1 = self.conv3d_2(x1)
x1 = self.batchnorm3d_4(x1)
x1 = self.relu_5(x1)
x1 = self.conv3d_6(x1)
x1 = self.conv3d_7(x1)
x1 = self.batchnorm3d_8(x1)
res = self.conv3d_1(x)
res = self.batchnorm3d_3(res)
out = P.Add()(x1, res)
out = self.relu_10(out)
return out
class CoarseStageRegFuse(nn.Cell):
"""CoarseStageRegFuse"""
def __init__(self):
super(CoarseStageRegFuse, self).__init__()
self.basicblocka_0 = Pseudo3DBlock_A(8, 8)
self.basicblockb_0 = Pseudo3DBlock_B()
self.conv3dtranspose_21 = nn.Conv3dTranspose(16, 8, 3, stride=2, padding=1, pad_mode="pad", output_padding=1)
self.conv3d_23 = nn.Conv3d(16, 8, (1, 3, 3), stride=1, padding=(0, 0, 1, 1, 1, 1), pad_mode="pad")
self.conv3d_24 = nn.Conv3d(8, 8, (3, 1, 1), stride=1, padding=(1, 1, 0, 0, 0, 0), pad_mode="pad")
self.conv3d_25 = nn.Conv3d(8, 1, 3, stride=1, padding=1, pad_mode="pad")
self.concat_1 = P.Concat(axis=1)
self.squeeze_1 = P.Squeeze(axis=1)
param_dict = mindspore.load_checkpoint("./ckpts/stage1_reg_fuse.ckpt")
params_not_loaded = mindspore.load_param_into_net(self, param_dict, strict_load=True)
print(params_not_loaded)
def construct(self, fused_interim, depth_values):
"""construct"""
x1 = self.basicblocka_0(fused_interim)
x2 = self.basicblockb_0(x1)
x1_upsample = self.conv3dtranspose_21(x2)
cost_volume = self.concat_1((x1_upsample, x1))
cost_volume = self.conv3d_23(cost_volume)
cost_volume = self.conv3d_24(cost_volume)
score_volume = self.conv3d_25(cost_volume)
score_volume = self.squeeze_1(score_volume)
prob_volume, _, prob_map = soft_argmin(score_volume, dim=1, keepdim=True, window=2)
est_depth = depth_regression(prob_volume, depth_values, keep_dim=True)
return est_depth, prob_map, prob_volume
class CoarseStageRegPair(nn.Cell):
"""CoarseStageRegPair"""
def __init__(self):
super(CoarseStageRegPair, self).__init__()
self.basicblocka_0 = Pseudo3DBlock_A(8, 8)
self.basicblockb_0 = Pseudo3DBlock_B()
self.conv3dtranspose_21 = nn.Conv3dTranspose(16, 8, 3, stride=2, padding=1, pad_mode="pad", output_padding=1)
self.concat_22 = P.Concat(axis=1)
self.conv3d_23 = nn.Conv3d(16, 8, (1, 3, 3), stride=1, padding=(0, 0, 1, 1, 1, 1), pad_mode="pad")
self.conv3d_24 = nn.Conv3d(8, 8, (3, 1, 1), stride=1, padding=(1, 1, 0, 0, 0, 0), pad_mode="pad")
self.conv3d_25 = nn.Conv3d(8, 1, 3, stride=1, padding=1, pad_mode="pad")
self.conv2d_38 = nn.Conv2d(1, 8, 3, stride=1, padding=1, pad_mode="pad")
self.batchnorm2d_39 = nn.BatchNorm2d(num_features=8, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.leakyrelu_40 = nn.LeakyReLU(alpha=0.009999999776482582)
self.conv2d_41 = nn.Conv2d(8, 8, 3, stride=1, padding=1, pad_mode="pad")
self.batchnorm2d_42 = nn.BatchNorm2d(num_features=8, eps=9.999999747378752e-06, momentum=0.8999999761581421)
self.leakyrelu_43 = nn.LeakyReLU(alpha=0.009999999776482582)
self.conv2d_45 = nn.Conv2d(8, 1, 3, stride=1, padding=1, pad_mode="pad")
self.conv2d_46 = nn.Conv2d(8, 1, 3, stride=1, padding=1, pad_mode="pad")
self.concat_1 = P.Concat(axis=1)
self.squeeze_1 = P.Squeeze(axis=1)
param_dict = mindspore.load_checkpoint("./ckpts/stage1_reg_pair.ckpt")
params_not_loaded = mindspore.load_param_into_net(self, param_dict, strict_load=True)
print(params_not_loaded)
def construct(self, cost_volume, depth_values):
"""construct"""
x1 = self.basicblocka_0(cost_volume)
x2 = self.basicblockb_0(x1)
x1_upsample = self.conv3dtranspose_21(x2)
interim = self.concat_1((x1_upsample, x1))
interim = self.conv3d_23(interim)
interim = self.conv3d_24(interim)
score_volume = self.conv3d_25(interim)
score_volume = self.squeeze_1(score_volume)
prob_volume, _ = soft_argmin(score_volume, dim=1, keepdim=True)
est_depth = depth_regression(prob_volume, depth_values, keep_dim=True)
entropy_ = entropy(prob_volume, dim=1, keepdim=True)
x = self.conv2d_38(entropy_)
x = self.batchnorm2d_39(x)
x = self.leakyrelu_40(x)
x = self.conv2d_41(x)
x = self.batchnorm2d_42(x)
x = self.leakyrelu_43(x)
out = P.Add()(x, entropy_)
uncertainty_map = self.conv2d_45(out)
occ = self.conv2d_46(out)
return interim, est_depth, uncertainty_map, occ
class StageRegFuse(nn.Cell):
"""StageRegFuse"""
def __init__(self, ckpt_path):
super(StageRegFuse, self).__init__()
self.basicblocka_0 = Pseudo3DBlock_A(8, 8)
self.basicblocka_1 = Pseudo3DBlock_A(8, 8)
self.basicblockb_0 = Pseudo3DBlock_B()
self.basicblocka_2 = Pseudo3DBlock_A(16, 16)
self.conv3dtranspose_38 = nn.Conv3dTranspose(16, 8, 3, stride=2, padding=1, pad_mode="pad", output_padding=1)
self.concat_39 = P.Concat(axis=1)
self.conv3d_40 = nn.Conv3d(16, 8, (1, 3, 3), stride=1, padding=(0, 0, 1, 1, 1, 1), pad_mode="pad")
self.conv3d_41 = nn.Conv3d(8, 8, (3, 1, 1), stride=1, padding=(1, 1, 0, 0, 0, 0), pad_mode="pad")
self.conv3d_42 = nn.Conv3d(8, 1, 3, stride=1, padding=1, pad_mode="pad")
self.concat_1 = P.Concat(axis=1)
self.squeeze_1 = P.Squeeze(axis=1)
param_dict = mindspore.load_checkpoint(ckpt_path)
params_not_loaded = mindspore.load_param_into_net(self, param_dict, strict_load=True)
print(params_not_loaded)
def construct(self, fused_interim, depth_values):
"""construct"""
x1 = self.basicblocka_0(fused_interim)
x1 = self.basicblocka_1(x1)
x2 = self.basicblockb_0(x1)
x2 = self.basicblocka_2(x2)
x1_upsample = self.conv3dtranspose_38(x2)
cost_volume = self.concat_1((x1_upsample, x1))
cost_volume = self.conv3d_40(cost_volume)
cost_volume = self.conv3d_41(cost_volume)
score_volume = self.conv3d_42(cost_volume)
score_volume = self.squeeze_1(score_volume)
prob_volume, _, prob_map = soft_argmin(score_volume, dim=1, keepdim=True, window=2)
est_depth = depth_regression(prob_volume, depth_values, keep_dim=True)
return est_depth, prob_map, prob_volume
| 41.424318 | 117 | 0.636157 |
94216394fa225ea1e01514370a7ab54fc7850fd6
| 4,394 |
py
|
Python
|
StateTracing/dataloader.py
|
junchenfeng/diagnosis_tracing
|
4e26e2ad0c7abc547f22774b6c9c299999a152c3
|
[
"MIT"
] | null | null | null |
StateTracing/dataloader.py
|
junchenfeng/diagnosis_tracing
|
4e26e2ad0c7abc547f22774b6c9c299999a152c3
|
[
"MIT"
] | null | null | null |
StateTracing/dataloader.py
|
junchenfeng/diagnosis_tracing
|
4e26e2ad0c7abc547f22774b6c9c299999a152c3
|
[
"MIT"
] | 1 |
2020-09-08T13:42:16.000Z
|
2020-09-08T13:42:16.000Z
|
from torch import tensor
from numpy.random import choice,shuffle
max_len = 128
def random_cut(length):
s = choice(length-max_len+1)
return s,s+max_len
def differeniate(statesA,statesB):
return [[i,b] for i,(a,b) in enumerate(zip(map(int,statesA),map(int,statesB))) if b!=a]
def generate_targets(sequences):
ciid = -1
Xs,Ys = [],[]
for i,(iid,state) in enumerate(sequences):
if iid != ciid:
cstate = state # updata state
ciid = iid
else:
cstate = sequences[i-1][1]
diff = differeniate(cstate,state)
if len(diff) > 0:
for d in diff:
Xs.append(state)
Ys.append(d)
return Xs,Ys
def pad_sequence3(sequences,padding = 0):
"""
every element in sequence is a 2-d list
with shape [time_steps,dim]
the dim is fixed
"""
d2s = [len(seq) for seq in sequences]
d3 = sequences[0][0].__len__()
result = []
max_l = max(d2s)
for seq,l in zip(sequences,d2s):
result.append(seq + [[padding for _ in range(d3)] for i in range((max_l-l))])
return result
def pad_sequence2(sequences,padding=0):
lens = [len(seq) for seq in sequences]
ml = max(lens)
results = []
for seq,l in zip(sequences,lens):
results.append(seq+[padding for i in range(ml-l)])
return results
def read_data(file_name):
data = []
add = data.append
tmp = []
with open(file_name,'r',encoding='utf8') as f:
while True:
line = f.readline()
if not line:break
if line.strip()=="":
if tmp!=[]:
add(tmp)
tmp = []
else:
item,state = line.strip().split()
# item + 1 bacause of padding value is 0
tmp.append([int(item)+1,list(map(int,state))])
if tmp != []:
add(tmp)
return data
def load_init():
results = {}
items = {}
for line in open('./items.dat','r',encoding='utf8'):
itm,id_ = line.strip().split(' ')
items[itm]=id_
for line in open('./init.dat','r',encoding='utf8'):
itm,state = line.strip().split(' ')
results[int(items[itm])+1] = [1 if e=='0' else 0 for e in state]
return results
class DataLoader():
def __init__(self,data,inits):
self.data = data
self.size = len(data)
self.inits = inits
def shuffle(self,):
shuffle(self.data)
def samples(self,batch_size):
cursor = 0
self.shuffle()
while cursor < self.size:
data = self.data[cursor:cursor+batch_size]
cursor += batch_size
states,masks = [],[]
for d in data:
if len(d)>max_len:
s,e = random_cut(len(d))
d = d[s:e]
itms,sts = zip(*d)
msk = [self.inits[i] for i in itms]
states.append(list(sts))
masks.append(msk)
yield pad_sequence3(states),pad_sequence3(masks)
def check(Xs,Ys):
for xs,ys in zip(Xs,Ys):
for i in range(len(xs)-1):
x_ = [v for v in xs[i]]
if sum(x_) == 0:
break
pos,val = ys[i]
x_[pos] = val
if x_ != xs[i+1]:
print(x_)
print('------')
print(xs[i+1])
print(' ')
print(' ')
print('ok')
if __name__ == '__main__':
from numpy import array
inits = load_init()
if 'data' not in dir():
data = read_data('../data/test.1.dat')
dl = DataLoader(data,inits)
for x,y in dl.samples(100):
x = array(x)
y = array(y)
print(x.shape,y.shape)
break
#
# items,sequences = zip(*data[123])
# x = data[15]
# y = generate_targets(x)
# items,states = zip(*x)
#
# x = [[1,[4,0,0,0]],
# [1,[4,1,0,0]],
# [1,[4,0,0,0]],
# [1,[4,2,0,0]]]
#
#
# targets = generate_targets(x)
| 26.46988 | 92 | 0.466318 |
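A small, self-contained sketch of the padding helpers above, assuming pad_sequence2 and pad_sequence3 from this file are in scope; the input values are illustrative only:

# pad_sequence2 pads 1-d lists with zeros up to the longest length
print(pad_sequence2([[1, 2, 3], [4]]))
# -> [[1, 2, 3], [4, 0, 0]]

# pad_sequence3 pads 2-d lists (time_steps x dim) with all-zero rows
print(pad_sequence3([[[1, 1], [2, 2]], [[3, 3]]]))
# -> [[[1, 1], [2, 2]], [[3, 3], [0, 0]]]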
84aa7a485b23bade222cd7a7bb91c2a1c86b90b1
| 5,893 |
py
|
Python
|
research/cv/centernet_det/infer/sdk/main.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/centernet_det/infer/sdk/main.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/centernet_det/infer/sdk/main.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# !/usr/bin/env python
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
"""
Sdk internece
"""
import argparse
import json
import os
import time
import copy
import cv2
import numpy as np
from api.infer import SdkApi
from api.visual import visual_image
from api.postprocess import data_process
from api.image import get_affine_transform
import MxpiDataType_pb2 as MxpiDataType
from StreamManagerApi import StringVector
from config import config as cfg
from eval.eval_by_sdk import cal_acc
def parser_args():
"""
configuration parameter, input from outside
"""
parser = argparse.ArgumentParser(description="centernet inference")
parser.add_argument("--img_path",
type=str,
required=True,
help="image file path.")
parser.add_argument(
"--pipeline_path",
type=str,
required=False,
default="config/centernet.pipeline",
help="pipeline file path. The default is 'config/centernet.pipeline'. ")
parser.add_argument(
"--infer_mode",
type=str,
required=False,
default="infer",
help=
"infer:only infer, eval: accuracy evaluation. The default is 'infer'.")
parser.add_argument(
"--infer_result_dir",
type=str,
required=False,
default="../data/infer_result",
help=
"cache dir of inference result. The default is '../data/infer_result'."
)
parser.add_argument("--ann_file",
type=str,
required=False,
help="eval ann_file.")
arg = parser.parse_args()
return arg
def process_img(img_file):
"""
Preprocessing the images
"""
mean = np.array([0.40789654, 0.44719302, 0.47026115], dtype=np.float32)
std = np.array([0.28863828, 0.27408164, 0.27809835], dtype=np.float32)
input_size = [512, 512]
img = cv2.imread(img_file)
size = img.shape
inp_width = size[1]
inp_height = size[0]
down_ratio = 4
c = np.array([inp_width / 2., inp_height / 2.], dtype=np.float32)
s = max(inp_height, inp_width) * 1.0
img_metas = {'c': c, 's': s,
'out_height': input_size[0] // down_ratio,
'out_width': input_size[1] // down_ratio}
trans_input = get_affine_transform(c, s, 0, [input_size[0], input_size[1]])
inp_img = cv2.warpAffine(img, trans_input, (cfg.MODEL_WIDTH, cfg.MODEL_HEIGHT), flags=cv2.INTER_LINEAR)
inp_img = (inp_img.astype(np.float32) / 255. - mean) / std
eval_image = inp_img.reshape((1,) + inp_img.shape)
model_img = eval_image.transpose(0, 3, 1, 2)
return model_img, img_metas
def image_inference(pipeline_path, stream_name, img_dir, result_dir):
"""
image inference: get inference for images
"""
sdk_api = SdkApi(pipeline_path)
if not sdk_api.init():
exit(-1)
if not os.path.exists(result_dir):
os.makedirs(result_dir)
img_data_plugin_id = 0
print(f"\nBegin to inference for {img_dir}.\n")
file_list = os.listdir(img_dir)
total_len = len(file_list)
for img_id, file_name in enumerate(file_list):
if not file_name.lower().endswith((".jpg", "jpeg")):
continue
image_name, _ = os.path.splitext(file_name)
file_path = os.path.join(img_dir, file_name)
img_np, meta = process_img(file_path)
sdk_api.send_tensor_input(stream_name,
img_data_plugin_id, "appsrc0",
img_np.tobytes(), img_np.shape, cfg.TENSOR_DTYPE_FLOAT32)
keys = [b"mxpi_tensorinfer0"]
keyVec = StringVector()
for key in keys:
keyVec.push_back(key)
start_time = time.time()
        infer_result = sdk_api.get_protobuf(stream_name, 0, keyVec)
end_time = time.time() - start_time
result = MxpiDataType.MxpiTensorPackageList()
result.ParseFromString(infer_result[0].messageBuf)
result = np.frombuffer(result.tensorPackageVec[0].tensorVec[0].dataStr,
dtype='float32').reshape((1, 100, 6))
img_id += 1
output = data_process(result, meta, image_name, cfg.NUM_CLASSES)
print(
f"End-2end inference, file_name: {file_path}, {img_id}/{total_len}, elapsed_time: {end_time}.\n"
)
save_pred_image_path = os.path.join(result_dir, "pred_image")
if not os.path.exists(save_pred_image_path):
os.makedirs(save_pred_image_path)
gt_image = cv2.imread(file_path)
anno = copy.deepcopy(output["annotations"])
visual_image(gt_image, anno, save_pred_image_path, score_threshold=cfg.SCORE_THRESH)
pred_res_file = os.path.join(result_dir, 'infer_{}_result.json').format(image_name)
with open(pred_res_file, 'w+') as f:
json.dump(output["annotations"], f, indent=1)
if __name__ == "__main__":
args = parser_args()
stream_name0 = cfg.STREAM_NAME.encode("utf-8")
print("stream_name0:")
print(stream_name0)
image_inference(args.pipeline_path, stream_name0, args.img_path,
args.infer_result_dir)
if args.infer_mode == "eval":
print("Infer end.")
print("Begin to eval...")
cal_acc(args.ann_file, args.infer_result_dir)
| 35.077381 | 108 | 0.642797 |
17042212e5fa71f184651ff1dc382b9b9ed9f1a9
| 1,278 |
py
|
Python
|
research/cv/MaskedFaceRecognition/config.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/MaskedFaceRecognition/config.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/MaskedFaceRecognition/config.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
network config setting, will be used in train.py and eval.py
"""
from easydict import EasyDict as ed
config = ed({
"class_num": 10572,
"batch_size": 128,
"learning_rate": 0.01,
"lr_decay_epochs": [40, 80, 100],
"lr_decay_factor": 0.1,
"lr_warmup_epochs": 20,
"p": 16,
"k": 8,
"loss_scale": 1024,
"momentum": 0.9,
"weight_decay": 1e-4,
"epoch_size": 120,
"buffer_size": 10000,
"image_height": 128,
"image_width": 128,
"save_checkpoint": True,
"save_checkpoint_steps": 195,
"keep_checkpoint_max": 2,
"save_checkpoint_path": "./"
})
| 31.170732 | 78 | 0.643975 |
ca433cfdc583b5417c142f3e27cbbc19fc20126f
| 636 |
py
|
Python
|
python_first_step/sortalgoArrayOperation/sort2.py
|
cartellefo/projet
|
23c67e847b415fb47f71e830b89a227fffed109b
|
[
"MIT"
] | null | null | null |
python_first_step/sortalgoArrayOperation/sort2.py
|
cartellefo/projet
|
23c67e847b415fb47f71e830b89a227fffed109b
|
[
"MIT"
] | null | null | null |
python_first_step/sortalgoArrayOperation/sort2.py
|
cartellefo/projet
|
23c67e847b415fb47f71e830b89a227fffed109b
|
[
"MIT"
] | null | null | null |
import time
import numpy as np
import numpy.linalg as nl
import random
import matplotlib.pyplot as plt
def sortInt(n_max) :
    # build a list of random integers between 1 and 10
listInt=[]
for i in range(1, n_max) :
s = random.randint(1,10)
listInt.append(s)
return(listInt)
intRand= sortInt(5)
print(intRand)
def tri_ins(t):
permut = 0
for k in range(1,len(t)):
temp=t[k]
j=k
while j>0 and temp<t[j-1]:
permut=permut+1
t[j]=t[j-1]
j-=1
t[j]=temp
print(t)
permut= permut + 2*len(t)
return t,permut
x,e=tri_ins(intRand)
| 16.307692 | 34 | 0.542453 |
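A quick sanity check of tri_ins against Python's built-in sorted(), assuming the functions above are in scope; the sample size is arbitrary, and permut is the file's own rough operation count:

import random

sample = [random.randint(1, 10) for _ in range(8)]
result, permut = tri_ins(list(sample))
assert result == sorted(sample)
print(result, permut)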
ca74d344cab07cbfd869efaea460c4c5bc949315
| 316 |
py
|
Python
|
exercises/de/exc_02_05_02.py
|
tuanducdesign/spacy-course
|
f8d092c5fa2997fccb3f367d174dce8667932b3d
|
[
"MIT"
] | 2 |
2020-07-07T01:46:37.000Z
|
2021-04-20T03:19:43.000Z
|
exercises/de/exc_02_05_02.py
|
tuanducdesign/spacy-course
|
f8d092c5fa2997fccb3f367d174dce8667932b3d
|
[
"MIT"
] | null | null | null |
exercises/de/exc_02_05_02.py
|
tuanducdesign/spacy-course
|
f8d092c5fa2997fccb3f367d174dce8667932b3d
|
[
"MIT"
] | null | null | null |
import spacy
nlp = spacy.blank("de")
# Import the Doc class
from ____ import ____
# Expected text: "Na, alles klar?"
words = ["Na", ",", "alles", "klar", "?"]
spaces = [____, ____, ____, ____, ____]
# Create a Doc from the words and spaces
doc = ____(____, ____=____, ____=____)
print(doc.text)
| 21.066667 | 50 | 0.674051 |
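The blanks above are intentional (this is a fill-in exercise); one possible completion, using the standard spacy.tokens.Doc constructor, is:

import spacy
from spacy.tokens import Doc

nlp = spacy.blank("de")

words = ["Na", ",", "alles", "klar", "?"]
spaces = [False, True, True, False, False]

doc = Doc(nlp.vocab, words=words, spaces=spaces)
print(doc.text)  # Na, alles klar?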
f3daf2d50e0303551f8d62b9050f38bb85c076ce
| 1,562 |
py
|
Python
|
python/pyqt/LearnPyQt/stack_layout.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
python/pyqt/LearnPyQt/stack_layout.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
python/pyqt/LearnPyQt/stack_layout.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
import sys
from PyQt5.QtWidgets import (
QApplication,
QMainWindow,
QWidget,
QVBoxLayout,
QStackedLayout,
QPushButton,
)
from PyQt5.QtGui import (
QPalette,
QColor,
)
class Color(QWidget):
def __init__(self, color, *args, **kwargs):
super(Color, self).__init__(*args, **kwargs)
self.setAutoFillBackground(True)
palette = self.palette()
palette.setColor(QPalette.Window, QColor(color))
self.setPalette(palette)
class MainWindow(QMainWindow):
def __init__(self, *args, **kwargs):
super(MainWindow, self).__init__(*args, **kwargs)
self.color_index = 3
self.setWindowTitle("Jayone's Awesome App")
layout = QVBoxLayout()
layout2 = QStackedLayout()
layout2.addWidget(Color('red'))
layout2.addWidget(Color('green'))
layout2.addWidget(Color('blue'))
layout2.addWidget(Color('yellow'))
layout2.setCurrentIndex(self.color_index)
layout.addLayout(layout2)
self.stack_layout = layout2
push_button = QPushButton('change')
push_button.clicked.connect(self.button_click)
layout.addWidget(push_button)
widget = QWidget()
widget.setLayout(layout)
self.setCentralWidget(widget)
def button_click(self):
self.color_index += 1
if self.color_index > 3:
self.color_index = 0
self.stack_layout.setCurrentIndex(self.color_index)
app = QApplication(sys.argv)
window = MainWindow()
window.show()
app.exec_()
| 23.313433 | 59 | 0.644686 |
f3fe9cadd102e32ca06c472d5c44ec2934c88eb6
| 4,592 |
py
|
Python
|
research/cv/PGAN/src/optimizer.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/PGAN/src/optimizer.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/PGAN/src/optimizer.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Gnet define"""
from mindspore import ops
from mindspore import nn
from mindspore.ops import constexpr
import mindspore
import numpy as np
@constexpr
def generate_tensor(batch_size):
"""generate_tensor
Returns:
output.
"""
np_array = np.random.randn(batch_size, 1, 1, 1)
return mindspore.Tensor(np_array, mindspore.float32)
class GradientWithInput(nn.Cell):
"""GradientWithInput"""
def __init__(self, discrimator):
super(GradientWithInput, self).__init__()
self.reduce_sum = ops.ReduceSum()
self.discrimator = discrimator
def construct(self, interpolates, alpha):
"""GradientWithInput
Returns:
output.
"""
decisionInterpolate = self.discrimator(interpolates, alpha)
decisionInterpolate = self.reduce_sum(decisionInterpolate, 0)
return decisionInterpolate
class WGANGPGradientPenalty(nn.Cell):
"""WGANGPGradientPenalty"""
def __init__(self, discrimator, lambdaGP=10):
super(WGANGPGradientPenalty, self).__init__()
self.reduce_sum = ops.ReduceSum()
self.reduce_sum_keep_dim = ops.ReduceSum(keep_dims=True)
self.sqrt = ops.Sqrt()
self.discrimator = discrimator
self.gradientWithInput = GradientWithInput(discrimator)
self.lambdaGP = mindspore.Tensor(lambdaGP, mindspore.float32)
self.gradient_op = ops.GradOperation()
def construct(self, input_x, fake, input_alpha):
"""WGANGPGradientPenalty
Returns:
output.
"""
batch_size = input_x.shape[0]
alpha = generate_tensor(batch_size)
alpha = alpha.expand_as(input_x)
interpolates = alpha * input_x + ((1 - alpha) * fake)
gradient = self.gradient_op(self.gradientWithInput)(interpolates, input_alpha)
gradient = ops.reshape(gradient, (batch_size, -1))
gradient = self.sqrt(self.reduce_sum(gradient * gradient, 1))
gradient_penalty = self.reduce_sum_keep_dim((gradient - 1.0) ** 2) * self.lambdaGP
return gradient_penalty
class AllLossD(nn.Cell):
"""AllLossD"""
def __init__(self, netD):
super(AllLossD, self).__init__()
self.netD = netD
self.wGANGPGradientPenalty = WGANGPGradientPenalty(self.netD)
self.reduce_sum = ops.ReduceSum()
self.epsilonLoss = EpsilonLoss(0.001)
self.scalr_summary = ops.ScalarSummary()
self.summary = ops.TensorSummary()
def construct(self, real, fake, alpha):
"""AllLossD
Returns:
output.
"""
predict_real = self.netD(real, alpha)
loss_real = -self.reduce_sum(predict_real, 0)
predict_fake = self.netD(fake, alpha)
loss_fake = self.reduce_sum(predict_fake, 0)
lossD_Epsilon = self.epsilonLoss(predict_real)
lossD_Grad = self.wGANGPGradientPenalty(real, fake, alpha)
all_loss = loss_real + loss_fake + lossD_Grad + lossD_Epsilon
return all_loss
class AllLossG(nn.Cell):
"""AllLossG"""
def __init__(self, netG, netD):
super(AllLossG, self).__init__()
self.netG = netG
self.netD = netD
self.reduce_sum = ops.ReduceSum()
def construct(self, inputNoise, alpha):
"""AllLossG
Returns:
output.
"""
fake = self.netG(inputNoise, alpha)
predict_fake = self.netD(fake, alpha)
loss_fake = -self.reduce_sum(predict_fake, 0)
return loss_fake
class EpsilonLoss(nn.Cell):
"""EpsilonLoss"""
def __init__(self, epsilonD):
super(EpsilonLoss, self).__init__()
self.reduce_sum = ops.ReduceSum()
self.epsilonD = mindspore.Tensor(epsilonD, mindspore.float32)
def construct(self, predRealD):
"""EpsilonLoss
Returns:
output.
"""
return self.reduce_sum(predRealD ** 2) * self.epsilonD
| 31.027027 | 90 | 0.647213 |
b679e07e4853c1ac8702e21a433f7100a58636c7
| 1,553 |
py
|
Python
|
dblp/python/citations.py
|
DocSeven/spark
|
a88330f554a4afc70696dac8d00bcf4d2f512acf
|
[
"Apache-2.0"
] | null | null | null |
dblp/python/citations.py
|
DocSeven/spark
|
a88330f554a4afc70696dac8d00bcf4d2f512acf
|
[
"Apache-2.0"
] | null | null | null |
dblp/python/citations.py
|
DocSeven/spark
|
a88330f554a4afc70696dac8d00bcf4d2f512acf
|
[
"Apache-2.0"
] | 1 |
2019-11-06T11:29:31.000Z
|
2019-11-06T11:29:31.000Z
|
import citationsCommon
def countByIdAndYear(rdd):
docsplit = rdd.flatMap(lambda row:
[('{}.{}'.format(ref, row[2]), 1) for ref in row[1]])
return docsplit.reduceByKey(lambda c, d: c + d)
def joinIdYearAge(idYearCount, ddpairs):
# idYear: id, year cited
idYear = idYearCount.map(lambda row: (row[0][:-5], int(row[0][-4:])))
# ddpairs is expected to be: id, year published
# idYearAge: id, year cited - year published
return idYear.join(ddpairs).filter(lambda row: (row[1][0] - row[1][1] >= -2)).map(
lambda row: ('{}.{}'.format(row[0], row[1][0]), (row[1][0] - row[1][1])))
def citationCountArrays(idYearAge, idYearCount):
p2Afunc = citationsCommon.pairsToArrayHelper.pairsToArray
return idYearAge.join(idYearCount).map(
lambda row: (row[0][:-5], [(row[1][0], row[1][1])])).reduceByKey(
lambda c, d: c + d).mapValues(lambda x: p2Afunc(x))
# df is the dataframe read from json before we've filtered out rows where
# references is NULL
# partitionCount says how many partitions to coalesce the intermediate
# data to.
def citationCountsE2E(df, partitionCount=34):
dd = df.select("id", "references", "year").filter("references is not NULL").rdd
idYearCount = countByIdAndYear(dd)
# For publication dates, include publications with no references.
idYearAge = joinIdYearAge(idYearCount, df.select("id", "year").rdd)
citCountArrays = citationCountArrays(idYearAge.coalesce(partitionCount),
idYearCount)
return citCountArrays
| 41.972973 | 86 | 0.666452 |
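A hedged end-to-end sketch of driving citationCountsE2E, assuming a local SparkSession, a dblp-style JSON dump with id, references and year fields, and that this module (together with its citationsCommon helper) is importable as citations; the input path is illustrative:

from pyspark.sql import SparkSession
import citations

spark = SparkSession.builder.appName("dblp-citations").getOrCreate()

# one JSON object per line, each with id, references and year
df = spark.read.json("dblp-ref/dblp-ref-0.json")

# RDD of (id, citation-count array keyed by citation age)
cit_count_arrays = citations.citationCountsE2E(df, partitionCount=34)
print(cit_count_arrays.take(1))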
1e61c19b1fa849a545aa5955ebc1129a3e165719
| 284 |
py
|
Python
|
zencad/interactive/axis.py
|
Spiritdude/zencad
|
4e63b1a6306dd235f4daa2791b10249f7546c95b
|
[
"MIT"
] | 5 |
2018-04-11T14:11:40.000Z
|
2018-09-12T19:03:36.000Z
|
zencad/interactive/axis.py
|
Spiritdude/zencad
|
4e63b1a6306dd235f4daa2791b10249f7546c95b
|
[
"MIT"
] | null | null | null |
zencad/interactive/axis.py
|
Spiritdude/zencad
|
4e63b1a6306dd235f4daa2791b10249f7546c95b
|
[
"MIT"
] | null | null | null |
from zencad.interactive.interactive_object import InteractiveObject
from OCC.Core.AIS import AIS_Axis
class AxisInteractiveObject(InteractiveObject):
def __init__(self, axis, color):
self.axis = axis
super().__init__(AIS_Axis(axis.to_Geom_Line()), color=color)
| 28.4 | 68 | 0.760563 |
1ebbf6764706c37fc954f30837690895994c3e65
| 1,759 |
py
|
Python
|
example_code/python/plot_curve_fit.py
|
NicoJG/PraktikumPhysikRepoVorlage
|
b29a4302958edc6205e2b107f7253f614cea0181
|
[
"MIT"
] | 1 |
2021-08-21T17:08:39.000Z
|
2021-08-21T17:08:39.000Z
|
example_code/python/plot_curve_fit.py
|
NicoJG/PraktikumPhysikRepoVorlage
|
b29a4302958edc6205e2b107f7253f614cea0181
|
[
"MIT"
] | null | null | null |
example_code/python/plot_curve_fit.py
|
NicoJG/PraktikumPhysikRepoVorlage
|
b29a4302958edc6205e2b107f7253f614cea0181
|
[
"MIT"
] | null | null | null |
# Create a best-fit curve from the given data
# and plot this curve together with the data
# Change the working directory to the experiment folder so the Python script can be run from anywhere
import os,pathlib
project_path = pathlib.Path(__file__).absolute().parent.parent
os.chdir(project_path)
# Use the matplotlibrc and header-matplotlib.tex files from the default folder
os.environ['MATPLOTLIBRC'] = str(project_path.parent/'default'/'python'/'matplotlibrc')
os.environ['TEXINPUTS'] = str(project_path.parent/'default'/'python')+':'
# Imports
import numpy as np
from scipy.optimize import curve_fit
import matplotlib.pyplot as plt
# Read in the data
x,y,z = np.genfromtxt('data/NAME.csv',delimiter=',',skip_header=1,unpack=True)
# Fit function (here: a straight line)
def f(x,a,b):
return a*x + b
# or as a lambda function
f = lambda x,a,b: a*x + b
# Compute the fit
params,pcov = curve_fit(f,x,y)
# Parameters
a = params[0]
b = params[1]
# Uncertainties
a_err = np.absolute(pcov[0][0])**0.5
b_err = np.absolute(pcov[1][1])**0.5
# Output the values somehow
# e.g. with print, but better saved to a JSON file
print(f'{a = }+-{a_err}')
print(f'{b = :.2f}+-{b_err:.2f}')
# Plot the fitted curve
x_linspace = np.linspace(np.min(x), np.max(x), 100)
plt.plot(x_linspace, f(x_linspace,*params), 'k-', label='Ausgleichskurve')
# Plot the data
plt.plot(x, y, 'ro', label='Daten')
# Axis labels with LaTeX (only when the matplotlibrc is used)
plt.xlabel(r'$\alpha \:/\: \si{\ohm}$')
plt.ylabel(r'$y \:/\: \si{\micro\joule}$')
# Unfortunately not (yet) possible in the matplotlibrc
plt.legend()
plt.tight_layout(pad=0, h_pad=1.08, w_pad=1.08)
# Save the plot as a PDF
plt.savefig('build/plot_NAME.pdf')
| 30.859649 | 110 | 0.728823 |
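The comment above suggests saving the fit values as JSON instead of only printing them; a minimal sketch of that, assuming a, b, a_err and b_err from the script are in scope (the output file name is illustrative):

import json

results = {
    "a": {"value": float(a), "error": float(a_err)},
    "b": {"value": float(b), "error": float(b_err)},
}
with open("build/fit_NAME.json", "w") as f:
    json.dump(results, f, indent=4)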
eca4f5aa27488a24c929f36854a3145b768fa867
| 3,266 |
py
|
Python
|
benwaonline/auth/core.py
|
goosechooser/benwaonline
|
e2879412aa6c3c230d25cd60072445165517b6b6
|
[
"MIT"
] | null | null | null |
benwaonline/auth/core.py
|
goosechooser/benwaonline
|
e2879412aa6c3c230d25cd60072445165517b6b6
|
[
"MIT"
] | 16 |
2017-09-13T10:21:40.000Z
|
2020-06-01T04:32:22.000Z
|
benwaonline/auth/core.py
|
goosechooser/benwaonline
|
e2879412aa6c3c230d25cd60072445165517b6b6
|
[
"MIT"
] | null | null | null |
import os
import requests
from flask import current_app
from jose import jwt, exceptions
from benwaonline.cache import cache
from benwaonline.exceptions import BenwaOnlineAuthError
ALGORITHMS = ['RS256']
def verify_token(token):
unverified_header = jwt.get_unverified_header(token)
rsa_key = match_key_id(unverified_header)
try:
payload = jwt.decode(
token,
rsa_key,
algorithms=ALGORITHMS,
audience=current_app.config['API_AUDIENCE'],
issuer=current_app.config['ISSUER']
)
except jwt.ExpiredSignatureError as err:
handle_expired_signature(err)
except jwt.JWTClaimsError as err:
handle_claims(err)
except exceptions.JWTError as err:
handle_jwt(err)
except Exception as err:
handle_non_jwt()
return payload
def match_key_id(unverified_header):
"""Checks if the RSA key id given in the header exists in the JWKS."""
jwks = get_jwks()
rsa_keys = [
rsa_from_jwks(key)
for key in jwks["keys"]
if key["kid"] == unverified_header["kid"]
]
try:
return rsa_keys[0]
except IndexError:
return None
def rsa_from_jwks(key):
return {
"kty": key["kty"],
"kid": key["kid"],
"use": key["use"],
"n": key["n"],
"e": key["e"]
}
def handle_claims(err):
"""Handles tokens with invalid claims"""
raise BenwaOnlineAuthError(
detail='{0}'.format(err),
title='invalid claim',
status=401
)
def handle_expired_signature(err):
"""Handles tokens with expired signatures."""
raise err
def handle_jwt(err):
"""Handles tokens with other jwt-related issues."""
raise BenwaOnlineAuthError(
detail='{0}'.format(err),
title='invalid signature',
status=401
)
def handle_non_jwt():
"""Handles everything else."""
raise BenwaOnlineAuthError(
title='invalid header',
detail='unable to parse authentication token'
)
@cache.cached(timeout=48 * 3600, key_prefix='jwks')
def get_jwks():
try:
msg = 'JWKS not cached - requesting from {}'.format(current_app.config['JWKS_URL'])
current_app.logger.debug(msg)
jwksurl = requests.get(current_app.config['JWKS_URL'], timeout=5)
except requests.exceptions.Timeout:
raise BenwaOnlineAuthError(
title='JWKS Request Timed Out',
detail='the authentication server is unavailable, or another issue has occured',
status=500
)
return jwksurl.json()
def has_scope(scope, token):
unverified_claims = jwt.get_unverified_claims(token)
token_scopes = unverified_claims['scope'].split()
return True if scope in token_scopes else False
def refresh_token_request(client, refresh_token):
data = {
'grant_type': 'refresh_token',
'refresh_token': refresh_token,
'client_id': client.consumer_key,
'client_secret': client.consumer_secret
}
msg = 'Attempting to refresh token at {}'.format(client.base_url + client.access_token_url)
current_app.logger.debug(msg)
resp = requests.post(client.base_url + client.access_token_url, data=data)
return resp.json()
| 27.91453 | 95 | 0.649418 |
01f72bd21f2a381c2c81de43a8ad15b68badbae6
| 4,917 |
py
|
Python
|
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/module_utils/cloudscale.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/module_utils/cloudscale.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/module_utils/cloudscale.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# (c) 2017, Gaudenz Steinlin <[email protected]>
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from copy import deepcopy
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_text
API_URL = 'https://api.cloudscale.ch/v1/'
def cloudscale_argument_spec():
return dict(
api_token=dict(fallback=(env_fallback, ['CLOUDSCALE_API_TOKEN']),
no_log=True,
required=True,
type='str'),
api_timeout=dict(default=30, type='int'),
)
class AnsibleCloudscaleBase(object):
def __init__(self, module):
self._module = module
self._auth_header = {'Authorization': 'Bearer %s' % module.params['api_token']}
self._result = {
'changed': False,
'diff': dict(before=dict(), after=dict()),
}
def _get(self, api_call):
resp, info = fetch_url(self._module, API_URL + api_call,
headers=self._auth_header,
timeout=self._module.params['api_timeout'])
if info['status'] == 200:
return self._module.from_json(to_text(resp.read(), errors='surrogate_or_strict'))
elif info['status'] == 404:
return None
else:
self._module.fail_json(msg='Failure while calling the cloudscale.ch API with GET for '
'"%s".' % api_call, fetch_url_info=info)
def _post_or_patch(self, api_call, method, data):
# This helps with tags when we have the full API resource href to update.
if API_URL not in api_call:
api_endpoint = API_URL + api_call
else:
api_endpoint = api_call
headers = self._auth_header.copy()
if data is not None:
# Sanitize data dictionary
# Deepcopy: Duplicate the data object for iteration, because
# iterating an object and changing it at the same time is insecure
for k, v in deepcopy(data).items():
if v is None:
del data[k]
data = self._module.jsonify(data)
headers['Content-type'] = 'application/json'
resp, info = fetch_url(self._module,
api_endpoint,
headers=headers,
method=method,
data=data,
timeout=self._module.params['api_timeout'])
if info['status'] in (200, 201):
return self._module.from_json(to_text(resp.read(), errors='surrogate_or_strict'))
elif info['status'] == 204:
return None
else:
self._module.fail_json(msg='Failure while calling the cloudscale.ch API with %s for '
'"%s".' % (method, api_call), fetch_url_info=info)
def _post(self, api_call, data=None):
return self._post_or_patch(api_call, 'POST', data)
def _patch(self, api_call, data=None):
return self._post_or_patch(api_call, 'PATCH', data)
def _delete(self, api_call):
resp, info = fetch_url(self._module,
API_URL + api_call,
headers=self._auth_header,
method='DELETE',
timeout=self._module.params['api_timeout'])
if info['status'] == 204:
return None
else:
self._module.fail_json(msg='Failure while calling the cloudscale.ch API with DELETE for '
'"%s".' % api_call, fetch_url_info=info)
def _param_updated(self, key, resource):
param = self._module.params.get(key)
if param is None:
return False
if resource and key in resource:
if param != resource[key]:
self._result['changed'] = True
patch_data = {
key: param
}
self._result['diff']['before'].update({key: resource[key]})
self._result['diff']['after'].update(patch_data)
if not self._module.check_mode:
href = resource.get('href')
if not href:
self._module.fail_json(msg='Unable to update %s, no href found.' % key)
self._patch(href, patch_data)
return True
return False
def get_result(self, resource):
if resource:
for k, v in resource.items():
self._result[k] = v
return self._result
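# Illustrative sketch (an added example, not part of the original module_utils): a cloud
# module built on this base class would typically merge cloudscale_argument_spec() into
# its own argument spec and reuse the request helpers. The 'name' option and the
# 'servers' endpoint below are assumptions made for this example only.
def _example_module_usage():
    from ansible.module_utils.basic import AnsibleModule
    argument_spec = cloudscale_argument_spec()
    argument_spec.update(dict(name=dict(type='str')))
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    api = AnsibleCloudscaleBase(module)
    return api.get_result(api._get('servers'))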
| 36.969925 | 106 | 0.548709 |
bf3a5e972de95e03358433c9a82b2ed12f784caf
| 1,025 |
py
|
Python
|
Hackerrank_problems/counting_valleys/solution1_CountingValleys.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 165 |
2020-10-03T08:01:11.000Z
|
2022-03-31T02:42:08.000Z
|
Hackerrank_problems/counting_valleys/solution1_CountingValleys.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 383 |
2020-10-03T07:39:11.000Z
|
2021-11-20T07:06:35.000Z
|
Hackerrank_problems/counting_valleys/solution1_CountingValleys.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 380 |
2020-10-03T08:05:04.000Z
|
2022-03-19T06:56:59.000Z
|
#
# how it works:
# this function takes the number of steps and the step details as input
# we keep a running level relative to sea level (the zero level)
# we run from 0 to the length of steps
# if we detect a "U" we increase the level
# similarly, if we detect a "D" we decrease the level
# a valley is counted each time a "U" step brings us back up to the zero level
def FindValleys(t, steps):
zeroLevel = 0
Valley = 0
for i in range(t):
if steps[i] == "U":
zeroLevel = zeroLevel + 1
else:
zeroLevel = zeroLevel - 1
if steps[i] == "U" and zeroLevel ==0:
Valley = Valley + 1
return Valley
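# Illustrative check (an added sketch, not part of the original solution): in the classic
# sample walk "UDDDUDUU" the hiker dips below sea level exactly once, so one valley is
# expected.
assert FindValleys(8, list("UDDDUDUU")) == 1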
# driver code
# this code takes the number of steps and the step details as input
# where each step is given as "U" for step up or "D" for step down
# we pass this data to the function FindValleys()
if __name__ == "__main__":
t = int(input())
    steps = list(input().strip())  # the path is a string of "U"/"D" characters, not integers
print(FindValleys(t, steps))
| 26.282051 | 100 | 0.616585 |
da69d942b34c7cb188f48d8f571305a3929e1a1b
| 95 |
py
|
Python
|
abfahrt/unittest/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | 1 |
2022-01-30T14:30:02.000Z
|
2022-01-30T14:30:02.000Z
|
abfahrt/unittest/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
abfahrt/unittest/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
"""
Unittest-Package for testing the most important classes/modules of the abfahrt-Package
"""
| 23.75 | 86 | 0.778947 |
e5f095d1388f9625e63f8d8fbeb39317cd585f8c
| 68 |
py
|
Python
|
Python/Books/Learning-Programming-with-Python.Tamim-Shahriar-Subeen/chapter-009.01-list/ph-9.11-list-del-function.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Books/Learning-Programming-with-Python.Tamim-Shahriar-Subeen/chapter-009.01-list/ph-9.11-list-del-function.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Books/Learning-Programming-with-Python.Tamim-Shahriar-Subeen/chapter-009.01-list/ph-9.11-list-del-function.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
li = [1, 2, 3, 3, 4, 5, 6]
del(li[1])   # removes the element at index 1 -> [1, 3, 3, 4, 5, 6]
print(li)
del(li)      # deletes the whole list object
print(li)    # raises NameError, because the name 'li' no longer exists
| 8.5 | 26 | 0.5 |
97742d434f756859171175e1ee026361918f8086
| 3,151 |
py
|
Python
|
blueprints/portfolio/build.py
|
andrenasturas/hausse
|
58e7cb71d5105cf1d6ec7d294e85668855bf8336
|
[
"MIT"
] | null | null | null |
blueprints/portfolio/build.py
|
andrenasturas/hausse
|
58e7cb71d5105cf1d6ec7d294e85668855bf8336
|
[
"MIT"
] | 1 |
2021-08-30T21:41:46.000Z
|
2021-08-30T21:41:46.000Z
|
blueprints/portfolio/build.py
|
andrenasturas/hausse
|
58e7cb71d5105cf1d6ec7d294e85668855bf8336
|
[
"MIT"
] | 1 |
2021-08-31T19:27:32.000Z
|
2021-08-31T19:27:32.000Z
|
from hausse import Hausse
from hausse.plugins import (
Assets,
DiscoverPartials,
Drop,
Handlebars,
Markdown,
MetadataMarkdown,
Relations,
Collection,
Collections
)
# Collections preparations
# By default, all files in "src/formations" folder will be grouped in this collection
Links = Collection("links")
# Using `indexBy` enables indexation, which is useful for building relations
Projects = Collection("projects", indexBy="title")
Skills = Collection("skills", indexBy="name")
h = Hausse("examples/portfolio")
h.clean()
h.use(
    # The `use()` method registers plugins into the Hausse project.
    # It is possible to call `use()` once or multiple times, with a single Plugin or a list of Plugins.
    # In any case, Plugins will be called in order.
[
        # Assets plugin is used to simply dump static files, like stylesheets or icons.
        # As it bypasses all other plugins by copying files directly into the "dist" folder,
        # it does not retrieve files from "src/assets" but directly from "assets"
Assets("assets"),
# Markdown parses all markdown files found in "src"
# Note that this plugin will also load as metadata all key-values present in headers
Markdown(),
# MetadataMarkdown parses markdown string found in files metadata
MetadataMarkdown("summary"),
# Collections (with a s) auto-creates collections based on files' "collections" metadata
Collections(),
# Each of the following defines a Collection and fill it with according files
Links,
Skills,
Projects,
        # Relations helps to make links between files in different collections.
        # That's why the Collections have been defined before the Hausse() call.
        # Another solution is to use CollectionSelector(collection_name) instead of the Collection
Relations(Projects, Skills),
# DiscoverPartials registers partials templates for Handlebars layout processing
DiscoverPartials("templates"),
# Handlebars does the actual layout processing to html files
Handlebars("layouts", "layout.hbs", "index.md"),
# Drop removes useless files from the project, before writing them in "dist"
# Note that it does not remove the actual files from "src" folder
# Here, it is used because we build a single page from multiple markdown files
# Once the layout plugin processed them, used markdown files are no longer wanted
Drop("*.md"),
]
)
# And here the magic happens. When `build()` is called, Hausse project generation begins
# Files from "src" directory are loaded and stored in a elements structure
# Every registered Plugin is called in order on the same set of elements, metadata and settings
# When all Plugins have been called, all files from elements are written in "dist" directory
h.build()
# Save will store the Hausse project configuration into a `hausse.json` file,
# which can be used later by Hausse in CLI mode operation: `python -m hausse
# hausse.json`. It is useful to simplify the project setup when development is
# done and the project goes to production.
h.save()
| 41.460526 | 96 | 0.70676 |
8ae079fb5cc787087019878ddc15a079fc9ed4df
| 2,165 |
py
|
Python
|
elements/python/9/14/soln.py
|
mmcloughlin/problems
|
6095842ffe007a12ec8c2093850515aa4e046616
|
[
"MIT"
] | 11 |
2019-02-08T06:54:34.000Z
|
2021-08-07T18:57:39.000Z
|
elements/python/9/14/soln.py
|
mmcloughlin/problems
|
6095842ffe007a12ec8c2093850515aa4e046616
|
[
"MIT"
] | 1 |
2019-05-21T08:14:10.000Z
|
2019-05-21T08:14:10.000Z
|
elements/python/9/14/soln.py
|
mmcloughlin/problems
|
6095842ffe007a12ec8c2093850515aa4e046616
|
[
"MIT"
] | null | null | null |
import collections
class MaxQueue(object):
"""
MaxQueue provides a base queue API and tracks the largest item contained
in the queue.
"""
def __init__(self):
self.items = []
self.peaks = []
def enqueue(self, x):
self.items.append(x)
while len(self.peaks) > 0 and self.peaks[-1] < x:
self.peaks.pop(-1)
self.peaks.append(x)
def dequeue(self):
if self.empty():
return None
x = self.items.pop(0)
if x == self.peaks[0]:
self.peaks.pop(0)
return x
def max(self):
return self.peaks[0] if not self.empty() else None
def empty(self):
return len(self.items) == 0
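# Minimal usage sketch (an added illustration, not part of the original solution):
# `peaks` keeps a non-increasing list of maximum candidates, so max() stays O(1).
def _demo_max_queue():
    q = MaxQueue()
    for v in (3, 1, 4, 1, 5):
        q.enqueue(v)
    assert q.max() == 5
    assert q.dequeue() == 3
    assert q.max() == 5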
TimestampValue = collections.namedtuple('TimestampValue', ['timestamp', 'value'])
def max_rolling_window(points, window_length):
q = MaxQueue()
maxima = []
t = 0
tail = 0
head = 0
while t <= points[-1].timestamp:
while head < len(points) and points[head].timestamp <= t:
q.enqueue(points[head].value)
head += 1
while points[tail].timestamp < t - window_length:
q.dequeue()
tail += 1
maxima.append(TimestampValue(
timestamp=t,
value=q.max(),
))
t += 1
return maxima
def test():
"""
test example from figure 9.4
"""
points = [
1.3, None, 2.5, 3.7,
None, 1.4, 2.6, None,
2.2, 1.7, None, None,
None, None, 1.7
]
maxima = [
1.3, 1.3, 2.5, 3.7,
3.7, 3.7, 3.7, 2.6,
2.6, 2.6, 2.2, 2.2,
1.7, None, 1.7,
]
timestamped_points = []
for t, p in enumerate(points):
if p is not None:
timestamped_points.append(TimestampValue(
timestamp=t,
value=p,
))
results = max_rolling_window(timestamped_points, 3)
for t, r in enumerate(results):
assert r.timestamp == t
assert maxima[t] == r.value
    print('pass')  # parenthesised so the file also runs under Python 3
def main():
test()
if __name__ == '__main__':
main()
| 23.031915 | 81 | 0.50485 |
c12ff6fc86fc337cb5664cef1cd543659806c57c
| 600 |
py
|
Python
|
staris.py
|
aertoria/MiscCode
|
a2e94d0fe0890e6620972f84adcb7976ca9f1408
|
[
"Apache-2.0"
] | null | null | null |
staris.py
|
aertoria/MiscCode
|
a2e94d0fe0890e6620972f84adcb7976ca9f1408
|
[
"Apache-2.0"
] | null | null | null |
staris.py
|
aertoria/MiscCode
|
a2e94d0fe0890e6620972f84adcb7976ca9f1408
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#You are climbing a staircase. It takes n steps to reach the top.
#Each time you can either climb 1 or 2 steps. In how many distinct ways can you climb to the top?
class Solution:
# @param {integer} n
# @return {integer}
count=0
def climbStairs(self, n):
self.rec_climb(n)
        print(self.count)
return self.count
def rec_climb(self, n):
if n==0:
#print 'yeah success'
self.count=self.count+1
elif n<0:
#print 'cannot climb this way'
pass
else:
self.rec_climb(n-1)
self.rec_climb(n-2)
solution=Solution()
solution.climbStairs(35)
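# Illustrative alternative (an added sketch, not part of the original file): the number of
# distinct ways follows the Fibonacci recurrence ways(n) = ways(n-1) + ways(n-2), so an
# iterative version avoids the exponential recursion used above.
def climb_stairs_iterative(n):
    a, b = 1, 1  # ways to reach step 0 and step 1
    for _ in range(n - 1):
        a, b = b, a + b
    return b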
| 18.181818 | 97 | 0.675 |
a9ee7d9ddb02712f3efcfc4263b4574071d89f40
| 237 |
py
|
Python
|
src/server/handlers/issues.py
|
monosidev/monosi
|
a88b689fc74010b10dbabb32f4b2bdeae865f4d5
|
[
"Apache-2.0"
] | 156 |
2021-11-19T18:50:14.000Z
|
2022-03-31T19:48:59.000Z
|
src/server/handlers/issues.py
|
monosidev/monosi
|
a88b689fc74010b10dbabb32f4b2bdeae865f4d5
|
[
"Apache-2.0"
] | 30 |
2021-12-27T19:30:56.000Z
|
2022-03-30T17:49:00.000Z
|
src/server/handlers/issues.py
|
monosidev/monosi
|
a88b689fc74010b10dbabb32f4b2bdeae865f4d5
|
[
"Apache-2.0"
] | 14 |
2022-01-17T23:24:34.000Z
|
2022-03-29T09:27:47.000Z
|
from server.handlers.base import ListResource
from server.models import Issue
class IssueListResource(ListResource):
@property
def resource(self):
return Issue
@property
def key(self):
return "issues"
| 16.928571 | 45 | 0.696203 |
68c4d76626da0bce8c26017b492f398cab45c0ab
| 18,560 |
py
|
Python
|
GZP_GTO_QGIS/INSTALLATION/GeoTaskOrganizer/gto_point.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 3 |
2019-06-18T15:28:09.000Z
|
2019-07-11T07:31:45.000Z
|
GZP_GTO_QGIS/INSTALLATION/GeoTaskOrganizer/gto_point.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 2 |
2019-07-11T14:03:25.000Z
|
2021-02-08T16:14:04.000Z
|
GZP_GTO_QGIS/INSTALLATION/GeoTaskOrganizer/gto_point.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 1 |
2019-06-12T11:07:37.000Z
|
2019-06-12T11:07:37.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
# QDoubleValidator needs QValidator in qgis 3.4!
from PyQt5.QtCore import Qt, QLocale, pyqtSignal
from PyQt5.QtGui import QDoubleValidator
from PyQt5.QtWidgets import QWidget, QLabel, QLineEdit, QHBoxLayout, QToolButton, QToolBar, QComboBox, QDoubleSpinBox
from PyQt5 import uic
from qgis.core import QgsProject, QgsCoordinateReferenceSystem, QgsPointXY, QgsCoordinateTransform, QgsVectorLayerUtils, \
QgsWkbTypes, QgsGeometry
from qgis.gui import QgsProjectionSelectionWidget, QgsVertexMarker
import os
from .gto_point_tool import GTOPointTool
class GTOPointWidget(QWidget):
isActive = pyqtSignal(bool)
def __init__(self, gtoObj, parent=None):
super(GTOPointWidget, self).__init__(parent)
self.gtomain = gtoObj.gtomain
self.info = self.gtomain.info
self.debug = self.gtomain.debug
try:
# references
self.helper = self.gtomain.helper
self.iface = self.gtomain.iface
self.prj = QgsProject.instance()
self.canvas = self.iface.mapCanvas()
# references
self.x = 0
self.y = 0
self.xt = 0
self.yt = 0
self.snaped = False
self.crs_transform = None
self.crs_layer = None
self.parent_widget = None # e.g toolbar
self.userEditX = False
self.userEditY = False
# config
self.tools = []
self.coordinatereferences = None
self.scale = 0
self.center = True
self.enable_save = False
self.precision = -1
self.cboCoordSystems = None
self.is_widgetaction = False
self.show_tool_button = False
self.addpoint_attributes = {}
self.tools_after_addpoint = []
# widgets:
uic.loadUi(os.path.join(os.path.dirname(__file__), 'gto_point.ui'), self)
# point tool
self.btnPointTool = self.btnPoint
# x
self.coordX = self.coordX
# self.validX = QDoubleValidator(sys.float_info.min, sys.float_info.max, 16, self.coordX) # no negative numbers possible?
# self.validX = QDoubleValidator(-999999999, 999999999, 16, self.coordX) # working but no range limit
self.validX = QDoubleValidator(self.coordX) # so we use the standard:
self.validX.setNotation(QDoubleValidator.StandardNotation) # By default, this property is set to ScientificNotation: i.e. 1.5E-2 is possible
self.coordX.setValidator(self.validX)
self.btnCopyXt = self.btnCopyXt
self.lblX = self.lblX
# y
self.coordY = self.coordY
self.validY = QDoubleValidator(self.coordY)
self.validY.setNotation(QDoubleValidator.StandardNotation)
self.coordY.setValidator(self.validY)
self.btnCopyYt = self.btnCopyYt
self.lblY = self.lblY
# show
self.btnShow = self.btnShow
self.btnShow.setIcon(self.helper.getIcon('mActionZoomPoint.png'))
# add point
self.btnAddPoint = self.btnAddPoint
self.btnAddPoint.setIcon(self.helper.getIcon('mActionAddPoint.png'))
self.btnAddPoint.setToolTip("Punkt erstellen")
# marker
self.marker = QgsVertexMarker(self.canvas)
self.marker.setColor(Qt.yellow)
self.marker.setIconType(QgsVertexMarker.ICON_CROSS)
self.marker.setIconSize(10)
self.marker.setPenWidth(3)
# See the enum IconType from http://www.qgis.org/api/classQgsVertexMarker.html
# maptool
self.mapTool = GTOPointTool(self.iface, self.canvas)
self.mapTool.isActive.connect(self.setToolStatus)
self.mapTool.canvasReleased.connect(self.setCoords)
# signals
# QToolButton.toggled()
self.btnPoint.clicked.connect(self.setMapTool)
# self.coordX.textChanged.connect(self.set_user_editX)
# self.coordY.textChanged.connect(self.set_user_editY)
self.coordX.textEdited.connect(self.set_user_editX)
self.coordY.textEdited.connect(self.set_user_editY)
# self.coordX.editingFinished.connect(self.check_coords)
# self.coordY.editingFinished.connect(self.check_coords)
self.btnShow.clicked.connect(self.showCoordinate)
self.btnCopyXt.clicked.connect(self.copyXt)
self.btnCopyYt.clicked.connect(self.copyYt)
self.btnAddPoint.clicked.connect(self.add_point)
self.prj.crsChanged.connect(self.prj_crs_changed)
self.iface.mapCanvas().currentLayerChanged.connect(self.layer_changed)
except Exception as e:
self.info.err(e)
def set_user_editX(self, *args):
try:
if self.debug: self.info.log("set_user_editX")
self.userEditX = True
self.marker.hide()
self.marker.setColor(Qt.blue)
self.snaped = False
except Exception as e:
self.info.err(e)
def set_user_editY(self, *args):
try:
if self.debug: self.info.log("set_user_editY")
self.userEditY = True
self.marker.hide()
self.marker.setColor(Qt.blue)
self.snaped = False
except Exception as e:
self.info.err(e)
def reset_user_edit(self):
if self.debug: self.info.log("reset_user_edit")
self.userEditX = False
self.userEditY = False
def check_coords(self):
try:
self.marker.hide()
if self.debug: self.info.log("useredit: X:", self.userEditX, "userEditY:", self.userEditY)
if self.coordX.text() == '':
self.coordX.setText('0')
self.x = 0
if self.coordY.text() == '':
self.coordY.setText('0')
self.y = 0
if self.userEditX or self.userEditY:
self.snaped = False
self.userEditX = False
self.userEditY = False
self.xt = float(self.coordX.text().replace(",", "."))
self.yt = float(self.coordY.text().replace(",", "."))
tr = QgsCoordinateTransform(self.crs_transform, self.prj.crs(), self.prj)
trPoint = tr.transform(QgsPointXY(self.xt, self.yt))
self.x = trPoint.x()
self.y = trPoint.y()
if self.debug: self.info.log("check_coords:", self.x, "/", self.y, "/snaped:", self.snaped)
except Exception as e:
self.info.err(e)
def setMapTool(self):
try:
self.canvas.setMapTool(self.mapTool)
except Exception as e:
self.info.err(e)
def set_parent_widget(self, widget):
try:
self.parent_widget = widget
if self.parent_widget.action.isChecked():
self.setMapTool()
except Exception as e:
self.info.err(e)
def setToolStatus(self, isActive):
try:
self.btnPoint.setChecked(isActive)
self.marker.hide()
self.isActive.emit(isActive)
if self.parent_widget is not None:
self.parent_widget.set_status(isActive)
except Exception as e:
self.info.err(e)
def setConfig(self, config):
try:
self.tools = config.get("tools", [])
self.coordinatereferences = config.get("coordinatereferences", None)
self.scale = config.get("scale", 0)
self.center = config.get("center", True)
self.enable_save = config.get('enable_save', False)
self.precision = config.get('precision', -1)
self.is_widgetaction = config.get('is_widgetaction', False)
self.show_tool_button = config.get('show_tool_button', not self.is_widgetaction)
self.addpoint_attributes = config.get("addpoint_attributes", {})
self.tools_after_addpoint = config.get("tools_after_addpoint", [])
if self.precision < 0:
self.precision, type_conversion_ok = self.prj.readNumEntry("PositionPrecision", "DecimalPlaces", 3)
# labels:
self.lblX.setText(config.get('label_x', 'X:'))
self.lblY.setText(config.get('label_y', 'Y:'))
# text
text = ''
if self.scale > 0 and self.center:
text = "Auf Koordinate zentrieren, Maßstab: " + str(self.scale)
elif self.center:
text = "Auf Koordinate zentrieren"
elif self.scale > 0:
text = "Maßstab: " + str(self.scale)
elif len(self.tools) > 0:
text = self.tools[0]
act = self.gtomain.findAction(self.tools[0])
if act is not None:
text = act.toolTip()
if act.icon() is not None:
self.btnShow.setIcon(act.icon())
if self.debug: self.info.log(text)
self.btnShow.setToolTip(text)
if self.btnShow.toolTip() == '':
self.btnShow.setHidden(True)
# add point
self.btnAddPoint.setHidden(not self.enable_save)
# point tool
self.btnPointTool.setHidden(not self.show_tool_button)
except Exception as e:
self.info.err(e)
def added(self): # widget was added to parent
try:
self.crs_transform = self.prj.crs()
self.crs_layer = self.iface.activeLayer().crs()
# set crs widget
if self.coordinatereferences is None:
# qgis transform
self.cboCoordSys.setHidden(True)
self.cboCoordSystems = self.mQgsProjectionSelectionWidget
self.cboCoordSystems.setMinimumWidth(460)
self.cboCoordSystems.setOptionVisible(QgsProjectionSelectionWidget.ProjectCrs, True)
self.cboCoordSystems.setCrs(self.prj.crs())
self.setCrs(self.cboCoordSystems.crs())
self.cboCoordSystems.crsChanged.connect(self.setCrs)
else:
# custom transform
self.mQgsProjectionSelectionWidget.setHidden(True)
self.cboCoordSystems = self.cboCoordSys
self.cboCoordSystems.setMinimumWidth(400)
self.cboCoordSystems.currentIndexChanged.connect(
lambda: self.setCrs(self.cboCoordSystems.currentData()))
self.cboCoordSystems.addItem(
"Projekt CRS: " + self.crs_transform.authid() + " - " + self.crs_transform.description(),
self.crs_transform)
for crsID in self.coordinatereferences:
try:
crs = QgsCoordinateReferenceSystem(crsID)
self.cboCoordSystems.addItem(crs.authid() + " - " + crs.description(), crs)
except Exception as e:
self.info.err(e)
self.cboCoordSystems.setCurrentIndex(0)
# here we know which type is cboCoordSystems!
self.setIconSizes()
except Exception as e:
self.info.err(e)
def setIconSizes(self):
try:
if self.parentWidget() is not None:
btns = self.findChildren(QToolButton)
for btn in btns:
try:
btn.setIconSize(self.iface.iconSize(False))
except:
pass
# help for the QGIS widget :S
self.cboCoordSystems.setMaximumHeight(self.cboCoordSys.height())
btns = self.cboCoordSystems.findChildren(QToolButton)
for btn in btns:
btn.setIconSize(self.iface.iconSize(False))
except Exception as e:
self.info.err(e)
def layer_changed(self, layer):
try:
if layer.geometryType() == QgsWkbTypes.GeometryType.PointGeometry:
self.btnAddPoint.setEnabled(True)
else:
self.btnAddPoint.setEnabled(False)
except Exception as e:
self.info.err(e)
def prj_crs_changed(self):
try:
self.reset_user_edit()
if self.coordinatereferences is not None: # my combo
self.crs_transform = self.prj.crs()
self.cboCoordSystems.setItemText(0,
"Projekt CRS: " + self.crs_transform.authid() + " - " + self.crs_transform.description())
self.cboCoordSystems.setItemData(0, self.crs_transform)
self.x = 0
self.y = 0
self.xt = 0
self.yt = 0
self.coordX.setText("---")
self.coordY.setText("---")
except Exception as e:
self.info.err(e)
def add_point(self):
try:
self.check_coords()
layer = self.iface.activeLayer()
if layer.geometryType() == QgsWkbTypes.GeometryType.PointGeometry:
self.prj.layerTreeRoot().findLayer(layer.id()).setItemVisibilityCheckedParentRecursive(True)
if self.x != 0 and self.y != 0:
feat = QgsVectorLayerUtils.createFeature(layer)
tr = QgsCoordinateTransform(self.prj.crs(), self.crs_layer, self.prj)
trPoint = tr.transform(QgsPointXY(self.x, self.y))
feat.setGeometry(QgsGeometry.fromPointXY(trPoint))
# direct save
# (res, features) = layer.dataProvider().addFeatures([feat])
# if self.debug: self.info.log("new point:", res, features[0])
# set attributes
dic_info = {"x": self.x, "y": self.y, "snaped": self.snaped}
# self.info.err(None,"mapping:",dic_info)
# self.info.err(None, "addpoint_attributes:", self.addpoint_attributes)
for k, v in self.addpoint_attributes.items():
# self.info.err(None,"attribute:",k,"value:",dic_info[v])
feat[k] = layer.fields().field(k).convertCompatible(dic_info[v])
features = [feat]
layer.featureAdded.connect(self.select_new_feature)
self.save_features(layer, features)
layer.featureAdded.disconnect(self.select_new_feature)
self.marker.hide()
self.helper.refreshLayer(layer)
self.gtomain.runcmd(self.tools_after_addpoint)
else:
self.info.gtoWarning('Ungültige Koordinaten! x:', self.x, "y:", self.y)
else:
self.info.gtoWarning('Kein Punktlayer ausgewählt!')
except Exception as e:
self.info.err(e)
def select_new_feature(self, featId):
try:
if self.debug: self.info.log("new featue:", self.iface.activeLayer().name(), "/ fid:", featId)
self.iface.activeLayer().selectByIds([featId])
self.mapTool.reset_marker()
self.marker.hide()
self.helper.refreshLayer(self.iface.activeLayer())
except Exception as e:
self.info.err(e)
def save_features(self, layer, features):
if not layer.isEditable():
layer.startEditing()
layer.beginEditCommand("layer {0} edit".format(layer.name()))
try:
layer.addFeatures(features)
layer.endEditCommand()
except Exception as e:
layer.destroyEditCommand()
raise e
def copyXt(self):
self.check_coords()
dsp = QDoubleSpinBox()
dsp.setDecimals(16)
self.helper.copyToClipboard(dsp.textFromValue(self.xt))
def copyYt(self):
self.check_coords()
dsp = QDoubleSpinBox()
dsp.setDecimals(16)
self.helper.copyToClipboard(dsp.textFromValue(self.yt))
def reset(self):
if self.debug: self.info.log("widget reset")
self.marker.hide()
def setCoords(self, point, snaped):
try:
self.reset_user_edit()
self.snaped = snaped
self.x = point.x()
self.y = point.y()
if self.debug: self.info.log("setCoords", self.x, "/", self.y)
self.setCrs(self.crs_transform)
# marker
self.marker.setCenter(QgsPointXY(self.x, self.y))
if snaped:
self.marker.setColor(Qt.red)
else:
self.marker.setColor(Qt.blue)
self.marker.show()
except Exception as e:
self.info.err(e)
def showCoordinate(self):
try:
self.check_coords()
self.marker.hide()
if self.x != 0 and self.y != 0:
pt_center = QgsPointXY(self.x, self.y)
self.marker.setCenter(pt_center)
self.marker.show()
# center map
if self.center:
self.canvas.setCenter(pt_center)
# scale map
if self.scale is not None and self.scale > 0:
self.canvas.zoomScale(self.scale)
self.canvas.refresh()
# run tools
self.gtomain.runcmd(self.tools)
else:
self.info.gtoWarning('Ungültige Koordinate! x:', self.x, "y:", self.y)
except Exception as e:
self.info.err(e)
def setCrs(self, crs):
try:
if self.debug: self.info.log("setCrs")
self.crs_transform = crs
tr = QgsCoordinateTransform(self.prj.crs(), self.crs_transform, self.prj)
trPoint = tr.transform(QgsPointXY(self.x, self.y))
self.xt = trPoint.x()
self.yt = trPoint.y()
d = round(trPoint.x(), self.precision)
display = str(d).replace(".", QLocale().decimalPoint())
self.coordX.setText(display)
d = round(trPoint.y(), self.precision)
display = str(d).replace(".", QLocale().decimalPoint())
self.coordY.setText(display)
except Exception as e:
self.info.err(e)
| 41.061947 | 154 | 0.563524 |
6ba4e572e52707590a52608ce4cc12b513909627
| 2,117 |
py
|
Python
|
gemtown/users/serializers.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | null | null | null |
gemtown/users/serializers.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | 5 |
2020-09-04T20:13:39.000Z
|
2022-02-17T22:03:33.000Z
|
gemtown/users/serializers.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from gemtown.modelphotos import models as modelphoto_models
from gemtown.modelers import models as modeler_models
from gemtown.musicians import models as musician_models
from . import models
import time
class TimestampField(serializers.Field):
def to_representation(self, value):
return int(time.mktime(value.timetuple()))
class UsernameSerializer(serializers.ModelSerializer):
class Meta:
model = models.User
fields = (
'username',
)
class MusicianSerializer(serializers.ModelSerializer):
class Meta:
model = musician_models.Musician
fields = (
'id',
'nickname',
'country',
)
class ModelPhotoSerializer(serializers.ModelSerializer):
class Meta:
model = modelphoto_models.ModelPhoto
fields = (
'file',
'photo_type',
)
class ModelerSerializer(serializers.ModelSerializer):
cover_image = ModelPhotoSerializer()
class Meta:
model = modeler_models.Modeler
fields = (
'id',
'cover_image',
'nickname',
'country',
)
class UserSerializer(serializers.ModelSerializer):
created_at = TimestampField()
updated_at = TimestampField()
followers = UsernameSerializer(many=True)
followings = UsernameSerializer(many=True)
musician = MusicianSerializer()
modeler = ModelerSerializer()
class Meta:
model = models.User
fields = (
'id',
'username',
'email',
'first_name',
'last_name',
'user_class',
'gem_amount',
'musician',
'modeler',
'gender',
'profile_photo',
'country',
'mobile_number',
'mobile_country',
'followers',
'followings',
'is_superuser',
'is_staff',
'created_at',
'updated_at'
)
| 25.817073 | 60 | 0.561171 |
d84e7b0326da78457b27f3f5b7fda50734903f66
| 775 |
py
|
Python
|
Data-Structures/Stacks/stack.py
|
hussamEL-Hwary/DS-Algo-Handbook
|
86a97d586a4ca8b17168c0a9f5a9f43f856eba58
|
[
"MIT"
] | 18 |
2016-11-01T04:00:36.000Z
|
2021-09-13T14:26:35.000Z
|
Data-Structures/Stacks/stack.py
|
JEERU/DS-Algo-Handbook
|
86a97d586a4ca8b17168c0a9f5a9f43f856eba58
|
[
"MIT"
] | 60 |
2016-10-11T14:50:47.000Z
|
2016-10-31T11:05:01.000Z
|
Data-Structures/Stacks/stack.py
|
JEERU/DS-Algo-Handbook
|
86a97d586a4ca8b17168c0a9f5a9f43f856eba58
|
[
"MIT"
] | 87 |
2016-09-08T05:04:50.000Z
|
2016-10-30T19:19:53.000Z
|
"""Implementation of a stack in python."""
class Stack:
def __init__(self):
self.items = []
def push(self, item):
"""Add an item to the stack."""
self.items.append(item)
def pop(self):
"""Remove the most recent item from the stack."""
if len(self.items) > 0:
last = self.items[-1]
del(self.items[-1])
return last
else:
raise IndexError
def peek(self):
"""Return the most recent item to be pushed to the stack."""
return self.items[-1]
def isEmpty(self):
"""Returns True if stack is empty ."""
return not len(self.items) >= 1
def size(self):
"""Return the size of the stack."""
return len(self.items)
| 24.21875 | 68 | 0.536774 |
d8b43126c4341230aae3fa4c8b5aa73490e76164
| 356 |
py
|
Python
|
uebung/bmi.py
|
wieerwill/Python-Intro
|
6b6f1d8b1b5c95590ffe15b0b4ddf188b680b491
|
[
"MIT"
] | 3 |
2019-03-02T16:34:53.000Z
|
2021-11-15T11:43:53.000Z
|
uebung/bmi.py
|
wieerwill/Python-Intro
|
6b6f1d8b1b5c95590ffe15b0b4ddf188b680b491
|
[
"MIT"
] | null | null | null |
uebung/bmi.py
|
wieerwill/Python-Intro
|
6b6f1d8b1b5c95590ffe15b0b4ddf188b680b491
|
[
"MIT"
] | null | null | null |
# Calculate your Body-Mass-Index with Python
print("BMI - Calculator!")
weight_str = input("Please insert your weight (in kg): ")
height_str = input("Please insert your bodys height(in m): ")
weight = float(weight_str.replace(",", "."))
height = float(height_str.replace(",", "."))
bmi = weight / height ** 2
print("Your BMI is: " + str(round(bmi, 1)))
| 29.666667 | 61 | 0.668539 |
514737538b6050cbe92637918e942f1823b10292
| 1,699 |
py
|
Python
|
server/weather/RestWeatherProvider.py
|
EveryOtherUsernameWasAlreadyTaken/BIS
|
e132ce42dcc74e634231398dfecb08834d478cba
|
[
"MIT"
] | 3 |
2019-07-09T08:51:20.000Z
|
2019-09-16T17:27:54.000Z
|
server/weather/RestWeatherProvider.py
|
thomasw-mitutoyo-ctl/BIS
|
08525cc12164902dfe968ae41beb6de0cd5bc411
|
[
"MIT"
] | 24 |
2019-06-17T12:33:35.000Z
|
2020-03-27T08:17:35.000Z
|
server/weather/RestWeatherProvider.py
|
EveryOtherUsernameWasAlreadyTaken/BIS
|
e132ce42dcc74e634231398dfecb08834d478cba
|
[
"MIT"
] | 1 |
2020-03-24T17:54:07.000Z
|
2020-03-24T17:54:07.000Z
|
import json
import logging
import threading
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
log = logging.getLogger(__name__)
class RestWeatherProvider(threading.Thread):
"""
The RestWeatherProvider serves the collected weather data using a simple http server. The weather data can be
obtained by doing a simple http GET request
"""
def __init__(self, repository, address, port):
super(RestWeatherProvider, self).__init__()
self.repository = repository
self.port = port
self.address = address
def run(self):
try:
log.info("Starting WeatherProvider")
# Create and start the http server
server = HTTPServer((self.address, self.port), self.request_handler)
server.serve_forever()
except Exception as e:
log.exception("WeatherProvider threw an exception: " + str(e))
def request_handler(self, *args):
HTTPRequestHandler(self.repository, *args)
class HTTPRequestHandler(BaseHTTPRequestHandler):
"""
HTTPRequestHandler for the RestWeatherProvider
"""
def __init__(self, repository, *args):
self.repository = repository
BaseHTTPRequestHandler.__init__(self, *args)
# noinspection PyPep8Naming
def do_GET(self):
"""
Handles the GET request and returns the weather in json format
"""
self.send_response(200)
self.send_header('Content-type', 'application/json;charset=utf-8')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
data = self.repository.get_all_data()
self.wfile.write(str(json.dumps(data)))
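# Illustrative client sketch (an added helper, not part of the original module): the served
# JSON can be fetched with a plain HTTP GET against whatever address and port the
# RestWeatherProvider was started with (Python 2, matching the module above).
def fetch_weather(address, port):
    import urllib2
    return json.loads(urllib2.urlopen("http://%s:%d/" % (address, port)).read())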
| 30.339286 | 114 | 0.669806 |