| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
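The simplest derived columns above (avg_line_length, max_line_length, alphanum_fraction) are plain functions of content. A minimal sketch of how they could be computed, assuming line length is counted in characters and alphanum_fraction is the share of alphanumeric characters — the exact definitions used to build this table are not stated here:

```python
def simple_signals(content: str) -> dict:
    # Split into lines; treat an empty file as one empty line to avoid division by zero.
    lines = content.splitlines() or [""]
    line_lengths = [len(line) for line in lines]
    alphanum = sum(ch.isalnum() for ch in content)
    return {
        "avg_line_length": sum(line_lengths) / len(line_lengths),
        "max_line_length": max(line_lengths),
        "alphanum_fraction": alphanum / len(content) if content else 0.0,
    }

if __name__ == "__main__":
    sample = 'print("hello")\n# a comment\n'
    print(simple_signals(sample))
```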
dfad2ce40cf4b3e7c6bdab613bdf207aa9161bc1 | 2,576 | py | Python | backend/app/app/api/deps.py | totalhack/zillion-web | e567c04d3564aec8105d54533d318b79d943c9c6 | ["MIT"] | 3 | 2020-10-01T11:28:02.000Z | 2020-10-31T15:35:51.000Z | backend/app/app/api/deps.py | totalhack/zillion-web | e567c04d3564aec8105d54533d318b79d943c9c6 | ["MIT"] | 1 | 2022-02-09T04:19:20.000Z | 2022-02-09T13:56:40.000Z | backend/app/app/api/deps.py | totalhack/zillion-web | e567c04d3564aec8105d54533d318b79d943c9c6 | ["MIT"] | null | null | null |
from typing import Generator, Dict, Any
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from jose import jwt
from pydantic import ValidationError
from sqlalchemy.orm import Session
from tlbx import json, pp
from zillion.configs import load_warehouse_config, zillion_config
from zillion.model import Warehouses
from zillion.warehouse import Warehouse
from app import app
from app import crud, models, schemas
from app.core import security
from app.core.config import settings
from app.db.session import SessionLocal
reusable_oauth2 = OAuth2PasswordBearer(
tokenUrl=f"{settings.API_V1_STR}/login/access-token"
)
warehouses = {}
@app.on_event("startup")
async def init_warehouses():
global warehouses
warehouses = get_warehouses()
def get_db() -> Generator:
try:
db = SessionLocal()
yield db
finally:
db.close()
def get_warehouses() -> Dict[str, Any]:
"""NOTE: this assumes Zillion Web DB is same as Zillion DB"""
global warehouses
if warehouses:
# TODO: cache control?
return warehouses
print("Building warehouses...")
db = SessionLocal()
try:
result = db.query(Warehouses).all()
for row in result:
warehouses[row.id] = Warehouse.load(row.id)
pp(warehouses)
return warehouses
finally:
db.close()
def get_current_user(
db: Session = Depends(get_db), token: str = Depends(reusable_oauth2)
) -> models.User:
try:
payload = jwt.decode(
token, settings.SECRET_KEY, algorithms=[security.ALGORITHM]
)
token_data = schemas.TokenPayload(**payload)
except (jwt.JWTError, ValidationError):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Could not validate credentials",
)
user = crud.user.get(db, id=token_data.sub)
if not user:
raise HTTPException(status_code=404, detail="User not found")
return user
def get_current_active_user(
current_user: models.User = Depends(get_current_user),
) -> models.User:
if not crud.user.is_active(current_user):
raise HTTPException(status_code=400, detail="Inactive user")
return current_user
def get_current_active_superuser(
current_user: models.User = Depends(get_current_user),
) -> models.User:
if not crud.user.is_superuser(current_user):
raise HTTPException(
status_code=400, detail="The user doesn't have enough privileges"
)
return current_user
| 27.115789
| 77
| 0.69604
| 319
| 2,576
| 5.489028
| 0.360502
| 0.056539
| 0.054826
| 0.063963
| 0.198744
| 0.131354
| 0.131354
| 0.131354
| 0.076528
| 0.076528
| 0
| 0.008437
| 0.21778
| 2,576
| 94
| 78
| 27.404255
| 0.860546
| 0.029891
| 0
| 0.297297
| 0
| 0
| 0.066185
| 0.016045
| 0
| 0
| 0
| 0.010638
| 0
| 1
| 0.067568
| false
| 0.027027
| 0.202703
| 0
| 0.337838
| 0.013514
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
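The hexsha and size columns in the record above (dfad2ce4…, 2,576 bytes) have the shape of git blob IDs and blob sizes. Assuming that convention — the table itself does not say so — a blob hash is recomputed from the raw file bytes as sketched below. Note that the content shown here has had blank lines collapsed, so hashing the displayed text would not reproduce the stored value; the file path in the example is hypothetical.

```python
import hashlib

def git_blob_sha1(data: bytes) -> str:
    # A git blob object is hashed as: "blob <byte length>\0" followed by the raw bytes.
    header = f"blob {len(data)}\0".encode()
    return hashlib.sha1(header + data).hexdigest()

# Hypothetical usage: hash the original file exactly as stored in the repository.
with open("deps.py", "rb") as fh:
    print(git_blob_sha1(fh.read()))
```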
dfb822e8f7cafa7cb423cc71ade94b740d42328b | 7,462 | py | Python | simple_task_repeater/str_app.py | lavrpetrov/simple-task-repeater | cd56ed52143ac31171fc757c6e1f7740bebe1ed4 | ["MIT"] | null | null | null | simple_task_repeater/str_app.py | lavrpetrov/simple-task-repeater | cd56ed52143ac31171fc757c6e1f7740bebe1ed4 | ["MIT"] | null | null | null | simple_task_repeater/str_app.py | lavrpetrov/simple-task-repeater | cd56ed52143ac31171fc757c6e1f7740bebe1ed4 | ["MIT"] | 1 | 2021-04-20T15:38:44.000Z | 2021-04-20T15:38:44.000Z |
import datetime
from collections import Counter
from functools import wraps
from dateparser import parse as parse_date
from calmlib import get_current_date, get_current_datetime, to_date, trim
from .base import Task
from .str_database import STRDatabase
from .telegram_bot import TelegramBot, command, catch_errors
DEFAULT_PERIOD = 4
TASK_PER_DAY_LIMIT = 3
class STRApp(TelegramBot):
# todo: rewrite all commands, add decorator that parses message and passes it to the command as kwargs.
@wraps(TelegramBot.__init__)
def __init__(self, db: STRDatabase, *args, **kwargs):
super().__init__(*args, **kwargs)
self.db = db
self._actualize_tasks()
self._last_actualize_date = get_current_date()
@staticmethod
def _tokenize_message(message):
result = {}
# cut off command code and get shortcut.
parts = message.split(maxsplit=2)
if len(parts) == 1:
raise ValueError("No task shortcut provided")
elif len(parts) == 2:
return {'shortcut': parts[1]}
_, result['shortcut'], message = parts
parts = message.split(':')
key = "text"
for part in parts[:-1]:
result[key], key = map(str.strip, part.rsplit(maxsplit=1))
result[key] = parts[-1].strip()
if not result['text']:
del result['text']
return result
def _parse_task(self, user, task):
"""
"""
if 'date' in task:
try:
task['date'] = parse_date(task['date'])
except:
raise ValueError(f"Failed to parse date {task['date']}")
else:
task['date'] = self._determine_suitable_date(user)
if 'period' in task:
task['period'] = int(task['period'])
else:
task['period'] = self._determine_suitable_period(user)
return task
def _determine_suitable_period(self, user):
# todo: count current tasks and estimate period necessary to stay below task_per_day_limit
# discard large-period tasks.
return DEFAULT_PERIOD
def _determine_suitable_date(self, user_name):
tasks = self.db.get_users_tasks(user_name)
tasks_dates = Counter([task.date.date() for task in tasks])
# find_date
task_date = get_current_datetime()
td = datetime.timedelta(days=1)
while tasks_dates[task_date.date()] >= TASK_PER_DAY_LIMIT:
task_date += td
# this naturally stops because each task register only once.
return task_date
def parse_message(self, user, message):
return self._parse_task(user, STRApp._tokenize_message(message))
@command
@catch_errors
def add(self, user, message):
"""
Add new task from message
Message should have format
{shortcut} {task text} period:1 {key}:{value}
"""
result = ""
task = {'user': user}
task.update(self.parse_message(user, message))
# todo: if date is not specified pick something suitable.
# todo: if period is not specified - pick something suitable depending on current load
task = Task(**task)
self.db.add_task(task)
result += f"Added task {task.shortcut}"
return result
@command
@catch_errors
def update(self, user, message):
update = self.parse_message(user, message)
task = self.db.get_task(user, update['shortcut'])
task.__dict__.update(update)
self.db.update_task(user, task)
return f"Successfully updated task {task.shortcut}"
@command
@catch_errors
def remove(self, user, message):
"""
Remove task.
"""
task = self.parse_message(user, message)
self.db.remove_task(user, task['shortcut'])
return f"Task {task['shortcut']} removed"
@command
@catch_errors
def get(self, user, message):
"""
Remove task.
"""
task = self.parse_message(user, message)
task = self.db.get_task(user, task['shortcut'])
return repr(task)
@command
@catch_errors
def start(self, user, message):
try:
self.db.add_user(user)
except ValueError:
return f"User {user} already active"
return f"Added user {user} successfully"
@command
@catch_errors
def stop(self, user, message):
try:
self.db.remove_user(user)
except ValueError:
return f"No user {user}"
return f"Removed user {user} successfully"
@command
def list_all(self, user, message):
"""
List shortcuts of users tasks
"""
# todo: make a short task repr.
return '\n'.join([task.shortcut for task in self.db.get_users_tasks(user)])
@command
def list(self, user, message):
"""
Get tasks for particular date.
"""
message = trim(message, '/list')
if message.strip():
date = parse_date(message)
else:
date = get_current_datetime()
self.actualize_tasks()
tasks = self.db.get_users_tasks(user)
# need to cast into date because date is datetime with hours etc.
tasks = [task for task in tasks if to_date(task.date) == to_date(date)]
response = date.strftime("Tasks for %a, %d %b\n")
response += "\n".join([task.text for task in tasks])
return response
@command
def complete(self, user, message):
"""
Register that you've completed a task
:param user:
:param message:
:return:
"""
task = self.parse_message(user, message)
if 'date' in task:
date = parse_date(task['date'])
else:
date = get_current_datetime()
task = self.db.get_task(user=user, shortcut=task['shortcut'])
task.completions.append(date)
task.date = date + datetime.timedelta(days=task.period)
self.db.update_task(task)
@command
def help(self, user, message):
"""
Return commands and shortened docstrings.
"""
reply = ""
# todo: add docstrings - instead of help message for each command.
# todo: how to make telegram list all possible commands?
reply += "Commands: \n"
reply += '\n '.join([command.__name__ for command in self.commands])
reply += "Task fields: \n"
reply += '\n '.join(Task.declared_fields.keys())
return reply
def run(self):
with self.db:
super().run()
def actualize_tasks(self):
if self._last_actualize_date < get_current_date():
self._actualize_tasks()
self._last_actualize_date = get_current_date()
def _actualize_tasks(self):
"""
Go over all tasks and update date/reschedule
"""
for user in self.db.user_names:
for task in self.db.get_users_tasks(user):
today = get_current_datetime()
while to_date(task.date) < to_date(today):
if task.reschedule:
# if task is past due and to be rescheduled - reschedule it on today
task.date = today
else:
task.date += datetime.timedelta(days=task.period)
self.db.update_task(task)
| 32.163793
| 107
| 0.590592
| 896
| 7,462
| 4.761161
| 0.21317
| 0.025316
| 0.038678
| 0.029536
| 0.241444
| 0.214018
| 0.129395
| 0.108064
| 0.108064
| 0.093061
| 0
| 0.002125
| 0.306352
| 7,462
| 231
| 108
| 32.30303
| 0.822063
| 0.148486
| 0
| 0.261147
| 0
| 0
| 0.071686
| 0
| 0
| 0
| 0
| 0.021645
| 0
| 1
| 0.121019
| false
| 0
| 0.050955
| 0.012739
| 0.286624
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
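Records like the two above are what the qsc_* columns are meant to filter. A minimal sketch of such a filter with pandas, assuming the table is stored as Parquet; the file name and thresholds are illustrative, not part of the dataset:

```python
import pandas as pd

# Hypothetical file name; point this at wherever the table is actually stored.
df = pd.read_parquet("code_quality_signals.parquet")

# Keep files that parse (cate_ast == 1), are not flagged as auto-generated,
# and are not dominated by duplicated 10-grams. Thresholds are illustrative.
kept = df[
    (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.2)
]
print(f"kept {len(kept)} of {len(df)} rows")
```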
dfb8674c6f7746d9692d1c11fcd1c8fdb24ebb98 | 258 | py | Python | Strings/conversion-operation.py | tverma332/python3 | 544c4ec9c726c37293c8da5799f50575cc50852d | ["MIT"] | 3 | 2022-03-28T09:10:08.000Z | 2022-03-29T10:47:56.000Z | Strings/conversion-operation.py | tverma332/python3 | 544c4ec9c726c37293c8da5799f50575cc50852d | ["MIT"] | 1 | 2022-03-27T11:52:58.000Z | 2022-03-27T11:52:58.000Z | Strings/conversion-operation.py | tverma332/python3 | 544c4ec9c726c37293c8da5799f50575cc50852d | ["MIT"] | null | null | null |
# lower , title , upper operations on string
x = "spider"
y = "MAN"
v=x.upper() # all letters will become uppercase
w=y.lower() # all letters will become lowercase
z=y.title() # only first letter will become upper and rest of all lowercase
print(v,w,z)
| 19.846154
| 75
| 0.705426
| 44
| 258
| 4.136364
| 0.590909
| 0.164835
| 0.153846
| 0.21978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 258
| 12
| 76
| 21.5
| 0.866667
| 0.666667
| 0
| 0
| 0
| 0
| 0.113924
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dfdddf5fadff57862aa0a7599f33c1b1aa1825cf | 1,188 | py | Python | src/pylendingclub/wrapper/order.py | bbarney213/PyLendingClub-Wrapper | 7f2ef823a56bc87b7f88abf86498805c2c9ce3d3 | ["BSD-3-Clause"] | 2 | 2018-06-06T20:04:04.000Z | 2018-06-13T12:17:37.000Z | src/pylendingclub/wrapper/order.py | bbarney213/PyLendingClub | 7f2ef823a56bc87b7f88abf86498805c2c9ce3d3 | ["BSD-3-Clause"] | 4 | 2018-10-18T13:59:14.000Z | 2018-10-23T16:08:51.000Z | src/pylendingclub/wrapper/order.py | bbarney213/PyLendingClub | 7f2ef823a56bc87b7f88abf86498805c2c9ce3d3 | ["BSD-3-Clause"] | null | null | null |
class Order(object):
def order(self):
return {
'loanId': self._loan_id,
'requestedAmount': self._amount,
'portfolioId': self._portfolio
}
def __init__(self, loan_id, amount, portfolio):
self._loan_id = int(loan_id)
self._amount = amount
self._portfolio = portfolio
@classmethod
def from_dict(cls, input_dict):
return cls(input_dict.get('loanId'),
input_dict.get('requestedAmount'),
input_dict.get('portfolioId'))
class ConfirmedOrder():
# TODO : Surface properties of the notes being purchased
@property
def fulfilled(self):
return self._fulfilled
def data(self):
source = dict(self._source)
source['fulfilled'] = self.fulfilled
return source
def __init__(self, json):
self._source = json
self._loan_id = json.get('loanId')
self._requested_amount = json.get('requestedAmount')
self._invested_amount = json.get('investedAmount')
self._execution_status = json.get('executionStatus')
self._fulfilled = 'ORDER_FULFILLED' in self._execution_status
| 29.7
| 69
| 0.619529
| 126
| 1,188
| 5.539683
| 0.333333
| 0.04298
| 0.057307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 1,188
| 39
| 70
| 30.461538
| 0.81352
| 0.045455
| 0
| 0
| 0
| 0
| 0.121908
| 0
| 0
| 0
| 0
| 0.025641
| 0
| 1
| 0.193548
| false
| 0
| 0
| 0.096774
| 0.387097
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dfe2c5242a263720a913b48fff9c5a2c72756ddd | 1,246 | py | Python | Python3-StandardLibrary/Chapter16_Web03_cgi.py | anliven/Reading-Code-Learning-Python | a814cab207bbaad6b5c69b9feeb8bf2f459baf2b | ["Apache-2.0"] | null | null | null | Python3-StandardLibrary/Chapter16_Web03_cgi.py | anliven/Reading-Code-Learning-Python | a814cab207bbaad6b5c69b9feeb8bf2f459baf2b | ["Apache-2.0"] | null | null | null | Python3-StandardLibrary/Chapter16_Web03_cgi.py | anliven/Reading-Code-Learning-Python | a814cab207bbaad6b5c69b9feeb8bf2f459baf2b | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
import cgi
form = cgi.FieldStorage() # Create the FieldStorage instance (only one should be created)
name = form.getvalue('name', 'world') # The CGI script reads values via getvalue; the default here is 'world'
print("""Content-type: text/html
<html>
<head>
<title>Greeting Page</title>
</head>
<body>
<h1>Hello, {}!</h1>
<form action='Chapter16_Web03_cgi.py'>
Change Name:<input type='text' name='name'>
<input type='submit' value='Submit'>
</form>
</body>
</html>
""".format(name))
# ### Script notes
# A simple CGI script containing an HTML form.
# Running the script:
# 1 - Start a CGI-capable web server: run "py -3 -m http.server --cgi" on the command line.
# 2 - Put this CGI script in the cgi-bin subdirectory of the server's directory and set its permissions.
# 3 - Open "http://127.0.0.1:8000/cgi-bin/Chapter16_Web03_cgi.py" in a browser.
# 4 - Fill in the text field and submit; something like "Hello world" will be displayed.
#
# ### Notes on HTML forms
# - An HTML form is an area of form elements that lets the user enter content, such as text fields, drop-down lists, radio buttons, checkboxes, submit buttons, etc.
# - It is set up with the <form> tag; the action attribute is set to the script's name, meaning the script runs again when the form is submitted.
# - The <input> element: the input type is defined by its type attribute.
#
# ### Web frameworks
# For serious or complex web applications, one generally does not hand-write tedious CGI scripts but uses a web framework, which automates much of the heavy lifting.
# More information: Python's web programming guide (https://wiki.python.org/moin/WebProgramming).
#
# ### The Flask web framework
# Flask is simple and practical, suitable for more complex server-side web application development.
# A simple framework for building complex web applications.
# Home-page: https://www.palletsprojects.com/p/flask/
# Documentation: http://flask.pocoo.org/docs/
| 28.318182
| 73
| 0.699839
| 148
| 1,246
| 5.864865
| 0.709459
| 0.018433
| 0.039171
| 0.043779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023744
| 0.121188
| 1,246
| 43
| 74
| 28.976744
| 0.76895
| 0.637239
| 0
| 0
| 0
| 0
| 0.705314
| 0.077295
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dfe2c9adf24a8776a39019cdfbf8a0a54e0be58c | 1,809 | py | Python | predefined_values.py | kovvik/bert_reader | 1b3b6a2bc29a026c64d2d7ba53ec5fabebf1f9e5 | ["MIT"] | null | null | null | predefined_values.py | kovvik/bert_reader | 1b3b6a2bc29a026c64d2d7ba53ec5fabebf1f9e5 | ["MIT"] | null | null | null | predefined_values.py | kovvik/bert_reader | 1b3b6a2bc29a026c64d2d7ba53ec5fabebf1f9e5 | ["MIT"] | null | null | null |
# https://uefi.org/sites/default/files/resources/UEFI%20Spec%202_6.pdf
# N.2.2 Section Descriptor
section_types = {
"9876ccad47b44bdbb65e16f193c4f3db": {
"name": "Processor Generic",
"error_record_reference": {}
},
"dc3ea0b0a1444797b95b53fa242b6e1d": {
"name": "Processor Specific - IA32/X64",
"error_record_reference": {}
},
"e429faf13cb711d4bca70080c73c8881": {
"name": "Processor Specific - IPF",
"error_record_reference": {}
},
"e19e3d16bc1111e49caac2051d5d46b0": {
"name": "Processor Specific - ARM",
"error_record_reference": {}
},
"a5bc11146f644edeb8633e83ed7c83b1": {
"name": "Platform Memory",
"error_record_reference": {}
},
"d995e954bbc1430fad91b44dcb3c6f35": {
"name": "PCIe",
"error_record_reference": {}
},
"81212a9609ed499694718d729c8e69ed": {
"name": "Firmware Error Record Reference",
"error_record_reference": {
"firmware_error_record_type": (0, 1, "byte"),
"reserved": (1, 7, "hex"),
"record_identifier": (8, 8, "hex")
}
},
"c57539633b844095bf78eddad3f9c9dd": {
"name": "PCI/PCI-X Bus",
"error_record_reference": {}
},
"eb5e4685ca664769b6a226068b001326": {
"name": "DMAr Generic",
"error_record_reference": {}
},
"71761d3732b245cda7d0b0fedd93e8cf": {
"name": "Intel® VT for Directed I/O specific DMAr section",
"error_record_reference": {}
},
"036f84e17f37428ca79e575fdfaa84ec": {
"name": "IOMMU specific DMAr section",
"error_record_reference": {}
}
}
error_severity = [
"Recoverable (also called non-fatal uncorrected)",
"Fatal",
"Corrected",
"Informational"
]
| 29.177419
| 70
| 0.599779
| 140
| 1,809
| 7.55
| 0.514286
| 0.135289
| 0.227058
| 0.051088
| 0.073794
| 0.073794
| 0
| 0
| 0
| 0
| 0
| 0.174107
| 0.257048
| 1,809
| 61
| 71
| 29.655738
| 0.611607
| 0.05141
| 0
| 0.178571
| 0
| 0
| 0.594042
| 0.36215
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dfea1cd7528525e57a90decbb00e4b3b1963212b | 4,114 | py | Python | tests/web/test_show_image.py | AndrewLorente/catsnap | 57427b8f61ef5185a41e49d55ffd7dd328777834 | ["MIT"] | 5 | 2015-11-23T18:40:00.000Z | 2019-03-22T06:54:04.000Z | tests/web/test_show_image.py | AndrewLorente/catsnap | 57427b8f61ef5185a41e49d55ffd7dd328777834 | ["MIT"] | 5 | 2016-04-07T15:35:53.000Z | 2019-02-10T23:00:32.000Z | tests/web/test_show_image.py | AndrewLorente/catsnap | 57427b8f61ef5185a41e49d55ffd7dd328777834 | ["MIT"] | 2 | 2015-12-02T16:44:05.000Z | 2017-09-29T23:17:33.000Z |
from __future__ import unicode_literals
import json
from tests import TestCase, with_settings
from nose.tools import eq_
from catsnap import Client
from catsnap.table.image import Image, ImageResize
from catsnap.table.album import Album
class TestShowImage(TestCase):
@with_settings(aws={'bucket': 'snapcats'})
def test_view_an_image(self):
session = Client().session()
album = Album(name='cow shots')
session.add(album)
session.flush()
prev_image = Image(filename='badcafe',
album_id=album.album_id)
session.add(prev_image)
image = Image(filename='deadbeef',
description='one time I saw a dead cow',
title='dead beef',
album_id=album.album_id)
session.add(image)
next_image = Image(filename='dadface',
album_id=album.album_id)
session.add(next_image)
session.flush()
response = self.app.get('/image/%d' % image.image_id)
assert 'https://s3.amazonaws.com/snapcats/deadbeef' in response.data,\
response.data
assert 'one time I saw a dead cow' in response.data, response.data
assert 'cow shots' in response.data, response.data
assert str(prev_image.image_id) in response.data, response.data
assert str(next_image.image_id) in response.data, response.data
@with_settings(aws={'bucket': 'snapcats'})
def test_view_an_image__defaults_to_medium(self):
session = Client().session()
image = Image(filename='deadbeef',
description='one time I saw a dead cow',
title='dead beef')
session.add(image)
session.flush()
for (size, suffix) in [(100, 'thumbnail'), (320, 'small'), (500, 'medium'), (1600, 'large')]:
session.add(ImageResize(image_id=image.image_id, width=size, height=size, suffix=suffix))
session.flush()
response = self.app.get('/image/%d' % image.image_id)
assert 'https://s3.amazonaws.com/snapcats/deadbeef_medium' in response.data,\
response.data
# if no medium exists, assume it's because the original is smaller than a
# "medium," and thus the original is an appropriate size.
@with_settings(aws={'bucket': 'snapcats'})
def test_view_an_image__defaults_to_original_if_no_medium_exists(self):
session = Client().session()
image = Image(filename='deadbeef',
description='one time I saw a dead cow',
title='dead beef')
session.add(image)
session.flush()
for (size, suffix) in [(100, 'thumbnail'), (320, 'small')]:
session.add(ImageResize(image_id=image.image_id, width=size, height=size, suffix=suffix))
session.flush()
response = self.app.get('/image/%d' % image.image_id)
assert 'src="https://s3.amazonaws.com/snapcats/deadbeef"' in response.data,\
response.data
@with_settings(aws={'bucket': 'snapcats'})
def test_get_image_info_as_json(self):
session = Client().session()
album = Album(name='cow shots')
session.add(album)
session.flush()
image = Image(filename='deadbeef',
description='one time I saw a dead cow',
title='dead beef',
album_id=album.album_id)
session.add(image)
image.add_tags(['cow', 'dead'])
session.flush()
response = self.app.get('/image/%d.json' % image.image_id)
eq_(json.loads(response.data), {
'description': 'one time I saw a dead cow',
'title': 'dead beef',
'album_id': album.album_id,
'tags': [ 'cow', 'dead', ],
'source_url': 'https://s3.amazonaws.com/snapcats/deadbeef',
'camera': None,
'photographed_at': None,
'focal_length': None,
'aperture': None,
'shutter_speed': None,
'iso': None,
})
| 39.557692
| 101
| 0.59018
| 489
| 4,114
| 4.822086
| 0.224949
| 0.067854
| 0.040712
| 0.06531
| 0.726887
| 0.715861
| 0.684902
| 0.633588
| 0.597116
| 0.597116
| 0
| 0.007834
| 0.286339
| 4,114
| 103
| 102
| 39.941748
| 0.7953
| 0.03087
| 0
| 0.545455
| 0
| 0
| 0.174197
| 0
| 0
| 0
| 0
| 0
| 0.079545
| 1
| 0.045455
| false
| 0
| 0.079545
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
5f049724d72ac2de8c5b11138f1e4b59bdb512ad | 1,744 | py | Python | src/harness/cu_pass/dpa_calculator/helpers/list_distributor/list_distributor.py | NSF-Swift/Spectrum-Access-System | 02cf3490c9fd0cec38074d3bdb3bca63bb7d03bf | ["Apache-2.0"] | null | null | null | src/harness/cu_pass/dpa_calculator/helpers/list_distributor/list_distributor.py | NSF-Swift/Spectrum-Access-System | 02cf3490c9fd0cec38074d3bdb3bca63bb7d03bf | ["Apache-2.0"] | null | null | null | src/harness/cu_pass/dpa_calculator/helpers/list_distributor/list_distributor.py | NSF-Swift/Spectrum-Access-System | 02cf3490c9fd0cec38074d3bdb3bca63bb7d03bf | ["Apache-2.0"] | null | null | null |
from abc import ABC, abstractmethod
from typing import Any, List, TypeVar
from cu_pass.dpa_calculator.helpers.list_distributor.fractional_distribution.fractional_distribution import \
FractionalDistribution
RETURN_TYPE = TypeVar('RETURN_TYPE')
class ListDistributor(ABC):
def __init__(self, items_to_distribute: List[Any]):
self._items = items_to_distribute
def distribute(self) -> List[List[RETURN_TYPE]]:
return [self._modify_group(distribution=distribution, group=group)
for distribution, group in zip(self._distributions, self._groups)]
@abstractmethod
def _modify_group(self, distribution: FractionalDistribution, group: List[Any]) -> List[RETURN_TYPE]:
return group
@property
def _groups(self) -> List[List[Any]]:
groups = []
for distribution in self._distributions:
next_index = sum(len(group) for group in groups)
remaining_items = self._items[next_index:]
items_in_group = self._get_items_in_distribution(distribution=distribution, items=remaining_items)
groups.append(items_in_group)
return groups
@property
@abstractmethod
def _distributions(self) -> List[FractionalDistribution]:
raise NotImplementedError
def _get_items_in_distribution(self, distribution: FractionalDistribution, items: List[Any]) -> List[Any]:
number_at_this_distribution = round(self._total_number_of_items * distribution.fraction)
is_last_distribution = distribution == self._distributions[-1]
return items if is_last_distribution else items[:number_at_this_distribution]
@property
def _total_number_of_items(self) -> int:
return len(self._items)
| 38.755556
| 110
| 0.725344
| 198
| 1,744
| 6.065657
| 0.287879
| 0.029142
| 0.02831
| 0.033306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000709
| 0.191514
| 1,744
| 44
| 111
| 39.636364
| 0.851064
| 0
| 0
| 0.147059
| 0
| 0
| 0.006307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.205882
| false
| 0.029412
| 0.088235
| 0.088235
| 0.470588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
5f0b9146ca28c5866c71c4fff522e7ed582731d7 | 3,900 | py | Python | cir/user_views.py | wafield/cir | 123d4bfe3e5bb4b0d605de486a91a0cb7eb34e4c | ["MIT"] | null | null | null | cir/user_views.py | wafield/cir | 123d4bfe3e5bb4b0d605de486a91a0cb7eb34e4c | ["MIT"] | null | null | null | cir/user_views.py | wafield/cir | 123d4bfe3e5bb4b0d605de486a91a0cb7eb34e4c | ["MIT"] | 1 | 2018-06-23T21:11:53.000Z | 2018-06-23T21:11:53.000Z |
import json
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponse
from django.utils import timezone
from django.contrib.auth.signals import user_logged_in
from cir.models import *
VISITOR_ROLE = 'visitor'
def login_view(request):
response = {}
email = request.REQUEST.get('email').lower()
password = request.REQUEST.get('password')
users = User.objects.filter(username=email)
if users.count() != 1:
return HttpResponse("Your user name and/or password is incorrect.", status=403)
user = authenticate(username=users[0].username, password=password)
if user:
login(request, user)
# request.session['user_id'] = user.id
response['user_id'] = user.id
response['user_name'] = user.get_full_name()
request.session['role'] = VISITOR_ROLE
try:
forum = Forum.objects.get(id=request.session['forum_id'])
if request.session['forum_id'] != -1:
request.session['role'] = Role.objects.get(user=user, forum=forum).role
except:
pass
response['role'] = request.session['role']
return HttpResponse(json.dumps(response), mimetype='application/json')
else:
return HttpResponse("Your user name and/or password is incorrect.", status=403)
def register(request):
response = {}
email = request.REQUEST.get('email').lower()
if User.objects.filter(username=email).count() > 0:
return HttpResponse("This user already exists; please sign in.", status=403)
password = request.POST['password']
description = request.POST['description']
user = User.objects.create_user(email, email, password)
user.first_name = request.POST['first-name']
user.last_name = request.POST['last-name']
user.save()
userinfo = UserInfo(user=user, description=description, last_visited_forum=None)
userinfo.save()
user = authenticate(username=email, password=password)
if user:
login(request, user)
# request.session['user_id'] = user.id
response['user_id'] = user.id
response['user_name'] = user.get_full_name()
response['role'] = VISITOR_ROLE
return HttpResponse(json.dumps(response), mimetype='application/json')
else:
return HttpResponse("Unknown error.", status=403)
def logout_view(request):
forum_id = request.session['forum_id']
logout(request)
# request.session['user_id'] = -1
request.session['forum_id'] = forum_id
return HttpResponse(json.dumps({}), mimetype='application/json')
def change_info(request):
if not request.user.is_authenticated():
return HttpResponse("Please log in first.", status=403)
user = request.user
action = request.REQUEST.get('action')
if action == 'get':
response = {}
response['first_name'] = user.first_name
response['last_name'] = user.last_name
response['email'] = user.email
response['description'] = user.info.description
return HttpResponse(json.dumps(response), mimetype='application/json')
if action == 'set-pw':
old_pw = request.REQUEST.get('old_password')
if not user.check_password(old_pw):
return HttpResponse("Password incorrect.", status=403)
new_pw = request.REQUEST.get('new_password')
user.set_password(new_pw)
user.save()
return HttpResponse(json.dumps({}), mimetype='application/json')
if action == 'set-info':
response = {}
user.first_name = request.REQUEST.get('first-name')
user.last_name = request.REQUEST.get('last-name')
user.info.description = request.REQUEST.get('description')
user.info.save()
user.save()
response['user_name'] = user.get_full_name()
return HttpResponse(json.dumps(response), mimetype='application/json')
| 39
| 87
| 0.661282
| 468
| 3,900
| 5.40812
| 0.181624
| 0.085342
| 0.06045
| 0.064006
| 0.407349
| 0.365468
| 0.33465
| 0.282892
| 0.199921
| 0.199921
| 0
| 0.007434
| 0.206667
| 3,900
| 99
| 88
| 39.393939
| 0.810601
| 0.026923
| 0
| 0.325581
| 0
| 0
| 0.144025
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0.151163
| 0.069767
| 0
| 0.255814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
5f0f752c5211938014f35ccb9166c1413d779264 | 3,313 | py | Python | test/test_prob_models.py | sylar233/de-identification | 44731e9c22de647063bd82a19936b4c5a144ea6e | ["Apache-2.0"] | 5 | 2016-11-07T12:54:51.000Z | 2018-12-15T00:20:26.000Z | test/test_prob_models.py | sylar233/de-identification | 44731e9c22de647063bd82a19936b4c5a144ea6e | ["Apache-2.0"] | 5 | 2016-07-05T06:06:31.000Z | 2016-07-27T05:21:36.000Z | test/test_prob_models.py | sylar233/de-identification | 44731e9c22de647063bd82a19936b4c5a144ea6e | ["Apache-2.0"] | 3 | 2018-07-18T07:32:43.000Z | 2021-11-05T05:25:55.000Z |
from django.test import TestCase
from common.data_utilities import DataUtils
from prob_models.dep_graph import DependencyGraph
from prob_models.jtree import JunctionTree
import common.constant as c
TESTING_FILE = c.TEST_DATA_PATH
"""
The test file has four fields, and the dependency graph would be a complete graph.
The junction Tree has only one clique
"""
class DependencyGraphTests(TestCase):
def setUp(self):
self.data = DataUtils(TESTING_FILE)
def test_dep_graph_edges_length_is_6(self):
"""
Test the Dependency graph computation
"""
dep_graph = DependencyGraph(self.data)
edges = dep_graph.get_dep_edges()
#print self.data.get_domain()
self.assertEqual(len(edges) == 3, True)
def test_dep_graph_with_white_list(self):
dep_graph = DependencyGraph(self.data, white_list = [['Age', 'Income', 'TRV'], ['DGF', 'HTN']])
edges = dep_graph.get_dep_edges()
def dep_graph_without_noise(self):
dep_graph = DependencyGraph(self.data, noise_flag = False)
self.assertEqual(
dep_graph.get_dep_edges() == [['Height', 'HTN'], ['Weight', 'HTN'], ['Income', 'TRV']],
True)
def test_dep_graph_contruct_from_edges(self):
edges = [['A','B'], ['B','C'], ['C', 'D'], ['D', 'E']]
dep_graph = DependencyGraph(edges = edges)
self.assertEqual(dep_graph.get_dep_edges() == edges, True)
def test_dep_graph_add_white_list(self):
edges = [['A','B'], ['B','C'], ['C', 'D'], ['D', 'E'], ['F']]
white_list = [['A','B', 'C'], ['C', 'D', 'F']]
dep_graph = DependencyGraph(edges = edges)
self.assertEqual(dep_graph.set_white_list(white_list).get_dep_edges() == [['A', 'B'], ['B', 'C'], ['C', 'D'], ['D', 'E'], ['F'], ('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'D'), ('C', 'F'), ('D', 'F')], True)
class JunctionTreeTests(TestCase):
def setUp(self):
self.data = DataUtils(TESTING_FILE)
self.dep_graph = DependencyGraph(self.data)
self.edges = self.dep_graph.get_dep_edges()
self.nodes = self.data.get_nodes_name()
self.jtree_path = c.TEST_JTREE_FILE_PATH
def test_jtree_without_noise(self):
dep_graph = DependencyGraph(self.data, noise_flag = False)
edges = dep_graph.get_dep_edges()
jtree = JunctionTree(edges, self.nodes, self.jtree_path)
cliques = jtree.get_jtree()['cliques']
self.assertEqual(cliques == [['HTN', 'Height'], ['HTN', 'Weight'], ['Income', 'TRV'], ['Age'], ['DGF']], True)
def test_jtree_with_white_list(self):
dep_graph = DependencyGraph(self.data, white_list = [['Age', 'Income', 'TRV'], ['DGF', 'HTN']])
edges = dep_graph.get_dep_edges()
jtree = JunctionTree(edges, self.nodes, self.jtree_path)
cliques = jtree.get_jtree()['cliques']
self.assertEqual(cliques == [['HTN', 'Height'], ['HTN', 'Weight'], ['HTN', 'DGF'], ['Income', 'TRV', 'Age']], True)
def test_build_jtree_then_check_jtree_file(self):
self.TestA()
self.TestB()
def TestA(self):
"""
The dependency graph is a complete graph,
so there is only one clique in the junction tree
"""
jtree = JunctionTree(self.edges, self.nodes, self.jtree_path)
jtreepy = jtree.get_jtree()
#print jtreepy
self.assertEqual(len(jtreepy) == 3, True)
def TestB(self):
import os, time
from stat import *
st = os.stat(self.jtree_path)
now = time.time()
# TODO: Need to know this file is new modified
#self.assertEqual((st.st_mtime - now) < 100000, True)
| 35.244681
| 209
| 0.677634
| 479
| 3,313
| 4.475992
| 0.212944
| 0.08209
| 0.085821
| 0.045709
| 0.494869
| 0.444963
| 0.404851
| 0.383862
| 0.383862
| 0.286847
| 0
| 0.003159
| 0.140054
| 3,313
| 94
| 210
| 35.244681
| 0.749386
| 0.080893
| 0
| 0.295082
| 0
| 0
| 0.065744
| 0
| 0
| 0
| 0
| 0.010638
| 0.114754
| 1
| 0.196721
| false
| 0
| 0.114754
| 0
| 0.344262
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
5f10f60ec6549819aee0320bd7db378dfb94aabf | 3,232 | py | Python | src/bel_commons/explorer_toolbox.py | cthoyt/pybel-web | a27f30617b9209d5531a6b65760597f8d45e9957 | ["MIT"] | 2 | 2019-07-17T16:17:44.000Z | 2019-07-18T17:05:36.000Z | src/bel_commons/explorer_toolbox.py | cthoyt/pybel-web | a27f30617b9209d5531a6b65760597f8d45e9957 | ["MIT"] | 3 | 2020-04-25T17:30:58.000Z | 2020-04-25T17:32:11.000Z | src/bel_commons/explorer_toolbox.py | cthoyt/pybel-web | a27f30617b9209d5531a6b65760597f8d45e9957 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""Constants for building the biological network explorer's transformations toolbox."""
from typing import List, Tuple
from pybel.struct.pipeline.decorators import mapped
# Default NetworkX explorer toolbox functions (name, button text, description)
_explorer_toolbox = (
('collapse_to_genes', 'Collapse to Genes', 'Collapse proteins and RNAs to genes'),
('collapse_all_variants', 'Collapse Variants', 'Collapse Variants to their Parent Nodes'),
('collapse_to_protein_interactions', 'Protein Interaction Network',
'Reduce the Network to Interactions between Proteins'),
('enrich_protein_and_rna_origins', 'Expand Protein Origins',
'Adds RNAs corresponding to Proteins, then adds Genes corresponding to RNAs and miRNAs'),
('prune_protein_rna_origins', 'Prune Genes/RNAs',
'Delete genes/RNAs that only have transcription/translation edges'),
('expand_periphery', 'Expand Periphery', 'Expand the periphery of the network'),
('expand_internal', 'Expand Internal', 'Adds missing edges between nodes in the network'),
('remove_isolated_nodes', 'Remove Isolated Nodes', 'Remove from the network all isolated nodes'),
('get_largest_component', 'Get Largest Component', 'Retain only the largest component and removes all others'),
('enrich_unqualified', 'Enrich unqualified edges', 'Adds unqualified edges from the universe'),
('remove_associations', 'Remove Associations', 'Remove associative relations'),
('remove_pathologies', 'Remove Pathologies', 'Removes all pathology nodes'),
('remove_biological_processes', 'Remove Biological Processes', 'Removes all biological process nodes'),
)
_bio2bel_functions = (
(
'enrich_rnas',
'Enrich RNA controllers from miRTarBase',
'Adds the miRNA controllers of RNA nodes from miRTarBase'
), (
'enrich_mirnas',
'Enrich miRNA targets',
'Adds the RNA targets of miRNA nodes from miRTarBase'
), (
'enrich_genes_with_families',
'Enrich Genes with Gene Family Membership',
'Adds the parents of HGNC Gene Families'
), (
'enrich_families_with_genes',
'Enrich Gene Family Membership',
'Adds the children to HGNC gene familes'
), (
'enrich_bioprocesses',
'Enrich Biological Process Hierarchy',
'Adds parent biological processes'
), (
'enrich_chemical_hierarchy',
'Enrich Chemical Hierarchy',
'Adds parent chemical entries'
), (
'enrich_proteins_with_enzyme_families',
'Add Enzyme Class Members',
'Adds enzyme classes for each protein'
), (
'enrich_enzymes',
'Enrich Enzyme Classes',
'Adds proteins corresponding to present ExPASy Enzyme codes'
)
)
def _function_is_registered(name: str) -> bool:
return name in mapped
def get_explorer_toolbox() -> List[Tuple[str, str, str]]:
"""Get the explorer toolbox list."""
explorer_toolbox = list(_explorer_toolbox)
explorer_toolbox.extend(
(func_name, title, description)
for func_name, title, description in _bio2bel_functions
if _function_is_registered(func_name)
)
return explorer_toolbox
| 40.911392
| 115
| 0.697401
| 363
| 3,232
| 6.035813
| 0.338843
| 0.05477
| 0.020539
| 0.020995
| 0.048836
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001173
| 0.20854
| 3,232
| 78
| 116
| 41.435897
| 0.855356
| 0.065594
| 0
| 0.107692
| 0
| 0
| 0.619348
| 0.104721
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030769
| false
| 0
| 0.030769
| 0.015385
| 0.092308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
5f20ed53742e88f3bc18e3804150cf7252de73ee | 7,638 | py | Python | src/python/codebay/common/runcommand.py | nakedible/vpnease-l2tp | 0fcda6a757f2bc5c37f4753b3cd8b1c6d282db5c | ["WTFPL"] | 5 | 2015-04-16T08:36:17.000Z | 2017-05-12T17:20:12.000Z | src/python/codebay/common/runcommand.py | nakedible/vpnease-l2tp | 0fcda6a757f2bc5c37f4753b3cd8b1c6d282db5c | ["WTFPL"] | null | null | null | src/python/codebay/common/runcommand.py | nakedible/vpnease-l2tp | 0fcda6a757f2bc5c37f4753b3cd8b1c6d282db5c | ["WTFPL"] | 4 | 2015-03-19T14:39:51.000Z | 2019-01-23T08:22:55.000Z |
"""
Codebay process running utils.
@group Running commands: run, call
@group Preexec functions: chroot, cwd
@var PASS:
Specifies that the given file descriptor should be passed directly
to the parent. Given as an argument to run.
@var FAIL:
Specifies that if output is received for the given file descriptor,
an exception should be signalled. Given as an argument to
run.
@var STDOUT:
Specifies that standard error should be redirected to the same file
descriptor as standard out. Given as an argument to run.
"""
__docformat__ = 'epytext en'
import os
from codebay.common import subprocess
PASS = -1
FAIL = -2
STDOUT = -3
class RunException(Exception):
"""Running command failed.
@ivar rv: Return value of the command.
@ivar stdout: Captured stdout of the command or None.
@ivar stderr: Captured stderr of the command or None.
"""
class chroot:
"""Returns a function that will do a chroot to path when invoked."""
def __init__(self, path):
self.path = path
def __call__(self):
os.chroot(self.path)
def __repr__(self):
return '%s(%s)' % (str(self.__class__), repr(self.path))
class cwd:
"""Returns a function that will do a cwd to path when invoked."""
def __init__(self, path):
self.path = path
def __call__(self):
os.chdir(self.path)
def __repr__(self):
return '%s(%s)' % (str(self.__class__), repr(self.path))
def call(*args, **kw):
"""Convenience wrapper for calling run.
Positional arguments are converted to a list and given to run as
an argument. Keyword arguments are passed as is to run.
>>> call('echo','-n','foo')
[0, 'foo', '']
>>> call('exit 1', shell=True)
[1, '', '']
"""
return run(list(args), **kw)
def run(args, executable=None, cwd=None, env=None, stdin=None, stdout=None, stderr=None, shell=False, preexec=None, retval=None):
"""Wrapper for running commands.
run takes a lot of arguments and they are explained here.
>>> run(['echo','-n','foo'])
[0, 'foo', '']
>>> run('exit 1', shell=True)
[1, '', '']
@param args:
List of strings or a single string specifying the program and
the arguments to execute. It is mandatory.
@param executable:
Name of the executable to be passed in argv[0]. Defaults to the
first value of args.
@param cwd:
Working directory to execute the program in. Defaults to no
change. Executes before preexec.
@param env:
Environment to execute the process with. Defaults to inheriting
the environment of the current process.
@param stdin:
If None, process is executed with a pipe with no data given. If
a string, process is executed with a pipe with the string as
input. If PASS, process stdin is inherited from the current
process. Defaults to None.
@param stdout:
If None, process stdout is captured with a pipe and returned. If
PASS, process stdout is inherited from the current process. If
FAIL, process stdout is captured with a pipe and an exception is
raised if the process prints to stdout. Defaults to None.
@param stderr:
Same as above with one addition. If STDOUT, then stderr is
redirected to the same destination as stdout.
@param shell:
If False, the command is executed directly. If True, the
arguments are passed to the shell for interpretation. Defaults
to False.
@param preexec:
Can be used to specify things to do just before starting the new
child process. The argument should be a list or tuple, all of
the callables in the list are executed just before starting the
child process. Defaults to no function executed.
@param retval:
If None, no checks are performed on the child process' return
value. If FAIL, an exception is raised if the child process
return value is not zero. If a callable, the callable is invoked
with the child process return value as an argument and an
exception is raised if the callable returned False.
@return:
List of retval, stdout output string and stderr output
string. If stdout or stderr is not captured, None is returned
instead.
@raise RunException:
Raised if stdout output, stderr output or return value check
triggered a failure.
@raise ValueError:
Raised if illegal arguments are detected.
@raise OSError:
Raised if starting the child process failed.
"""
if isinstance(args, list):
popen_args = args
elif isinstance(args, str):
popen_args = [args]
else:
raise ValueError('Unknown value %s passed as args.' % repr(args))
if preexec is None:
preexec_fn = None
elif isinstance(preexec, (list, tuple)):
def do_preexec():
for f in preexec:
f()
preexec_fn = do_preexec
else:
raise ValueError('Unknown value %s passed as preexec.' % repr(preexec))
if stdin is None:
popen_stdin = subprocess.PIPE
popen_input = None
elif stdin is PASS:
popen_stdin = None
popen_input = None
elif isinstance(stdin, str):
popen_stdin = subprocess.PIPE
popen_input = stdin
else:
raise ValueError('Unknown value %s passed as stdin.' % repr(stdin))
if stdout is None:
popen_stdout = subprocess.PIPE
elif stdout is PASS:
popen_stdout = None
elif stdout is FAIL:
popen_stdout = subprocess.PIPE
else:
raise ValueError('Unknown value %s passed as stdout.' % repr(stdout))
if stderr is None:
popen_stderr = subprocess.PIPE
elif stderr is PASS:
popen_stderr = None
elif stderr is FAIL:
popen_stderr = subprocess.PIPE
elif stderr is STDOUT:
popen_stderr = subprocess.STDOUT
else:
raise ValueError('Unknown value %s passed as stderr.' % repr(stderr))
if retval is None:
rvcheck = None
elif retval is FAIL:
def do_check(i):
return i == 0
rvcheck = do_check
elif callable(retval):
rvcheck = retval
else:
raise ValueError('Unknown value %s passed as retval.' % repr(retval))
handle, rv = None, None
try:
handle = subprocess.Popen(popen_args,
executable=executable,
stdin=popen_stdin,
stdout=popen_stdout,
stderr=popen_stderr,
close_fds=True,
cwd=cwd,
env=env,
shell=shell,
preexec_fn=preexec_fn)
stdout, stderr = handle.communicate(input=popen_input)
finally:
if handle is not None:
rv = handle.wait()
if stdout is FAIL:
if stdout != '':
e = RunException('Process printed to stdout.')
e.rv = rv
e.stdout = stdout
e.stderr = stderr
raise e
if stderr is FAIL:
if stderr != '':
e = RunException('Process printed to stderr.')
e.rv = rv
e.stdout = stdout
e.stderr = stderr
raise e
if rvcheck is not None:
if not rvcheck(rv):
e = RunException('Process return value check failed.')
e.rv = rv
e.stdout = stdout
e.stderr = stderr
raise e
return [rv, stdout, stderr]
| 32.092437
| 129
| 0.615213
| 1,003
| 7,638
| 4.62014
| 0.192423
| 0.013811
| 0.024601
| 0.033664
| 0.287225
| 0.239102
| 0.200691
| 0.138325
| 0.071429
| 0.071429
| 0
| 0.002086
| 0.309505
| 7,638
| 237
| 130
| 32.227848
| 0.876564
| 0.45038
| 0
| 0.310345
| 0
| 0
| 0.079692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086207
| false
| 0.086207
| 0.017241
| 0.025862
| 0.172414
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
5f268730f61e34ddeec03c49cb3a27cf05cffa58 | 545 | py | Python | SourceCode/Module2/escape_sequences.py | hackettccp/CIS106SampleCode | 0717fa0f6dc0c48bc51f16ab44e7425b186a35c3 | ["MIT"] | 1 | 2019-10-23T03:25:43.000Z | 2019-10-23T03:25:43.000Z | SourceCode/Module2/escape_sequences.py | hackettccp/CIS106 | 0717fa0f6dc0c48bc51f16ab44e7425b186a35c3 | ["MIT"] | null | null | null | SourceCode/Module2/escape_sequences.py | hackettccp/CIS106 | 0717fa0f6dc0c48bc51f16ab44e7425b186a35c3 | ["MIT"] | null | null | null |
"""
Demonstrates escape sequences in strings
"""
#Line Feed escape sequence.
output1 = "First part \n Second part"
print(output1)
#********************************#
print()
#Double quotes escape sequence.
output2 = "The book \"War and Peace\" is very long"
print(output2)
#********************************#
print()
#Single quote escape sequence.
output3 = 'That is Tom\'s bike'
print(output3)
#********************************#
print()
#Backslash escape sequence.
output4 = "A single backslash \\ will be inserted"
print(output4)
| 18.793103
| 51
| 0.577982
| 59
| 545
| 5.338983
| 0.644068
| 0.177778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017167
| 0.144954
| 545
| 28
| 52
| 19.464286
| 0.658798
| 0.455046
| 0
| 0.272727
| 0
| 0
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.636364
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
5f2bba8707cde7a8ee1262b60500da6ac69cd76b | 5,618 | py | Python | study/python/pyqt/app/signup.py | cheenwe/blog | a866b3ab98aa58e3ed4a7624fbb72c8fd8dee790 | ["MIT"] | 10 | 2016-09-28T03:22:41.000Z | 2020-06-16T08:42:25.000Z | study/python/pyqt/app/signup.py | cheenwe/blog | a866b3ab98aa58e3ed4a7624fbb72c8fd8dee790 | ["MIT"] | 12 | 2017-04-18T08:41:04.000Z | 2020-06-10T02:54:58.000Z | study/python/pyqt/app/signup.py | cheenwe/blog | a866b3ab98aa58e3ed4a7624fbb72c8fd8dee790 | ["MIT"] | 8 | 2016-09-28T03:03:32.000Z | 2019-09-16T04:22:01.000Z |
from PyQt5 import QtCore, QtGui, QtWidgets
from db import Db
class Ui_Signup(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.setFixedSize(638, 441)
# self.label = QtWidgets.QLabel(Dialog)
# self.label.setGeometry(QtCore.QRect(110, 190, 151, 31))
# self.label.setObjectName("label")
self.label_password = QtWidgets.QLabel(Dialog)
self.label_password.setGeometry(QtCore.QRect(110, 260, 151, 31))
self.label_password.setObjectName("label_password")
self.label_password2 = QtWidgets.QLabel(Dialog)
self.label_password2.setGeometry(QtCore.QRect(110, 300, 171, 31))
self.label_password2.setObjectName("label_password2")
self.label_email = QtWidgets.QLabel(Dialog)
self.label_email.setGeometry(QtCore.QRect(110, 230, 161, 31))
self.label_email.setObjectName("label_email")
# self.txtUsername = QtWidgets.QLineEdit(Dialog)
# self.txtUsername.setGeometry(QtCore.QRect(290, 190, 221, 27))
# self.txtUsername.setObjectName("txtUsername")
self.txtEmail = QtWidgets.QLineEdit(Dialog)
self.txtEmail.setGeometry(QtCore.QRect(290, 230, 221, 27))
self.txtEmail.setObjectName("txtEmail")
self.txtPassword = QtWidgets.QLineEdit(Dialog)
################## make the password invisible ############
self.txtPassword.setEchoMode(QtWidgets.QLineEdit.Password)
###########################################################
self.txtPassword.setGeometry(QtCore.QRect(290, 270, 221, 27))
self.txtPassword.setObjectName("txtPassword")
self.txtPassword2 = QtWidgets.QLineEdit(Dialog)
################## make the password2 invisible ############
self.txtPassword2.setEchoMode(QtWidgets.QLineEdit.Password)
###########################################################
self.txtPassword2.setGeometry(QtCore.QRect(290, 310, 221, 27))
self.txtPassword2.setObjectName("txtPassword2")
self.btnRegister = QtWidgets.QPushButton(Dialog)
self.btnRegister.setGeometry(QtCore.QRect(240, 360, 131, 41))
self.btnRegister.setObjectName("btnRegister")
################## register button#########################
self.btnRegister.clicked.connect(self.registerButton)
###########################################################
self.label_Heading = QtWidgets.QLabel(Dialog)
self.label_Heading.setGeometry(QtCore.QRect(120, 30, 431, 61))
self.label_Heading.setObjectName("label_Heading")
self.label_5 = QtWidgets.QLabel(Dialog)
self.label_5.setGeometry(QtCore.QRect(110, 150, 151, 31))
self.label_5.setObjectName("label_5")
self. label_name = QtWidgets.QLabel(Dialog)
self. label_name.setGeometry(QtCore.QRect(110, 150, 151, 31))
self. label_name.setObjectName(" label_name")
self.txtName = QtWidgets.QLineEdit(Dialog)
self.txtName.setGeometry(QtCore.QRect(290, 150, 221, 27))
self.txtName.setObjectName("txtName")
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def registerButton(self):
name = self.txtName.text()
email = self.txtEmail.text()
# username = self.txtUsername.text()
password = self.txtPassword.text()
password2 = self.txtPassword2.text()
if self.checkFields(name,email,password):
self.showMessage("Error", "All fields must be filled")
else:
if(self.checkPassword(password,password2)):
insertDb = Db()
Db().insertTable(name,email,password)
self.showMessage("Success","Registration successul")
self.clearField()
else:
self.showMessage("Error","Passwords doesn't match")
def showMessage(self,title,msg):
msgBox = QtWidgets.QMessageBox()
msgBox.setIcon(QtWidgets.QMessageBox.Information)
#msgBox.setTitle(title)
msgBox.setText(msg)
msgBox.setStandardButtons(QtWidgets.QMessageBox.Ok)
msgBox.exec_()
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", " XXX系统"))
self.label_password.setText(_translate("Dialog", " 密码:"))
self.label_password2.setText(_translate("Dialog", "重复密码:"))
self.label_email.setText(_translate("Dialog", "邮箱:"))
self.btnRegister.setText(_translate("Dialog", "注册"))
self.label_Heading.setText(_translate("Dialog", " 账户注册"))
self. label_name.setText(_translate("Dialog", "用户名:"))
def loginPage(self):
self.loginWindow = QtWidgets.QDialog()
self.ui = Ui_Signup2()
self.ui.setupUi(self.loginWindow)
self.loginWindow.show()
def checkFields(self,name,email,password):
if(name=="" or email == "" or password== ""):
return True
############## check if password1 and password2 matches #############
def checkPassword(self,password, password2):
return password == password2
##################### clear fields ##################
def clearField(self):
self.txtPassword.setText(None)
self.txtName.setText(None)
self.txtEmail.setText(None)
self.txtPassword2.setText(None)
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Dialog = QtWidgets.QDialog()
ui = Ui_Signup()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
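# registerButton() above calls Db().insertTable(name, email, password), but the Db helper
# is defined elsewhere and not shown here. A minimal sketch of what such a helper could
# look like, assuming a local SQLite file and a plain "users" table (both are assumptions
# about the unseen Db class; real code should also hash passwords rather than store them):
import sqlite3

class Db:
    def __init__(self, path="users.db"):
        self.conn = sqlite3.connect(path)
        self.conn.execute(
            "CREATE TABLE IF NOT EXISTS users (name TEXT, email TEXT, password TEXT)")

    def insertTable(self, name, email, password):
        # Parameterised insert mirroring the call made in registerButton()
        self.conn.execute(
            "INSERT INTO users (name, email, password) VALUES (?, ?, ?)",
            (name, email, password))
        self.conn.commit()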
| 41.925373
| 73
| 0.619794
| 556
| 5,618
| 6.172662
| 0.244604
| 0.068182
| 0.083333
| 0.050991
| 0.14627
| 0.024476
| 0.024476
| 0.024476
| 0.024476
| 0
| 0
| 0.03892
| 0.208793
| 5,618
| 133
| 74
| 42.240602
| 0.733183
| 0.083482
| 0
| 0.020619
| 0
| 0
| 0.060935
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082474
| false
| 0.28866
| 0.030928
| 0.010309
| 0.14433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
5f2c8d2aea105ec6207836197f28f050f9bd7157
| 318
|
py
|
Python
|
bot/__init__.py
|
sudomice/crypto-bot
|
51dcf66d79612f2ba8bdf5645005b143fbeda343
|
[
"MIT"
] | null | null | null |
bot/__init__.py
|
sudomice/crypto-bot
|
51dcf66d79612f2ba8bdf5645005b143fbeda343
|
[
"MIT"
] | null | null | null |
bot/__init__.py
|
sudomice/crypto-bot
|
51dcf66d79612f2ba8bdf5645005b143fbeda343
|
[
"MIT"
] | null | null | null |
import requests
from bot.constants import BASE_ENDPOINT
import cli.app
@cli.app.CommandLineApp
def bot(app):
ping_response = requests.get(BASE_ENDPOINT+'api/v3/ping')
print(f'{ping_response}:{ping_response.json()}')
# bot.add_param("-h", "--help", help="HELP me")
if __name__ == '__main__':
bot.run()
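# BASE_ENDPOINT is imported from bot.constants but not shown above. The 'api/v3/ping'
# path suggests a Binance-style REST API, so the constant is presumably the API root
# ending in '/'. A hypothetical value for illustration only (an assumption, not the
# repository's actual constant):
BASE_ENDPOINT = 'https://api.binance.com/'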
| 19.875
| 61
| 0.701258
| 46
| 318
| 4.543478
| 0.608696
| 0.172249
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003623
| 0.132075
| 318
| 15
| 62
| 21.2
| 0.753623
| 0.141509
| 0
| 0
| 0
| 0
| 0.210332
| 0.140221
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.333333
| 0
| 0.444444
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
a0282750f46f0e414d25e4e4d34acff48c249677
| 843
|
py
|
Python
|
h1st_contrib/cli/__init__.py
|
h1st-ai/h1st-contrib
|
38fbb1fff4513bb3433bc12f2b436836e5e51c80
|
[
"Apache-2.0"
] | 1
|
2022-02-19T18:55:43.000Z
|
2022-02-19T18:55:43.000Z
|
h1st_contrib/cli/__init__.py
|
h1st-ai/h1st-contrib
|
38fbb1fff4513bb3433bc12f2b436836e5e51c80
|
[
"Apache-2.0"
] | null | null | null |
h1st_contrib/cli/__init__.py
|
h1st-ai/h1st-contrib
|
38fbb1fff4513bb3433bc12f2b436836e5e51c80
|
[
"Apache-2.0"
] | null | null | null |
"""H1st CLI."""
import click
from .pred_maint import h1st_pmfp_cli
@click.group(name='h1st',
cls=click.Group,
commands={
'pmfp': h1st_pmfp_cli,
},
# Command kwargs
context_settings=None,
# callback=None,
# params=None,
help='H1st CLI >>>',
epilog='^^^ H1st CLI',
short_help='H1st CLI',
options_metavar='[OPTIONS]',
add_help_option=True,
no_args_is_help=True,
hidden=False,
deprecated=False,
# Group/MultiCommand kwargs
invoke_without_command=False,
subcommand_metavar='H1ST_SUB_COMMAND',
chain=False,
result_callback=None)
def h1st_cli():
"""H1st CLI."""
| 24.085714
| 51
| 0.498221
| 80
| 843
| 5
| 0.5125
| 0.105
| 0.055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019646
| 0.396204
| 843
| 34
| 52
| 24.794118
| 0.766208
| 0.105575
| 0
| 0
| 0
| 0
| 0.087838
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| true
| 0
| 0.095238
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
a030c83c63bfd7833e9eefaa8a970b32e999331c
| 5,111
|
py
|
Python
|
tests/cli/test_database.py
|
julienc91/dbtrigger
|
d06916a019641377bf3d45b2e8e38399643450db
|
[
"MIT"
] | null | null | null |
tests/cli/test_database.py
|
julienc91/dbtrigger
|
d06916a019641377bf3d45b2e8e38399643450db
|
[
"MIT"
] | null | null | null |
tests/cli/test_database.py
|
julienc91/dbtrigger
|
d06916a019641377bf3d45b2e8e38399643450db
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import uuid
import pytest
from dbtrigger.cli import DatabaseCli, QueryCli, ServerCli
from dbtrigger.config import settings
from dbtrigger.models import Database
@pytest.fixture(autouse=True)
def add_server(server):
ServerCli.add(server.identifier, server.hostname, server.dialect)
def compare_databases(db1, db2):
assert db1.identifier == db2.identifier
assert db1.server.identifier == db2.server.identifier
assert db1.name == db2.name
assert db1.username == db2.username
assert db1.password == db2.password
return True
def test_list(database, server):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
DatabaseCli.list()
def test_list_no_databases():
DatabaseCli.list()
def test_add_database(database, server):
assert len(settings.databases) == 0
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
assert len(settings.databases) == 1
assert compare_databases(database, settings.databases[database.identifier])
def test_add_database_duplicate(database, server):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
with pytest.raises(ValueError):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
assert len(settings.databases) == 1
def test_add_database_not_existing_server(database):
with pytest.raises(ValueError):
DatabaseCli.add(database.identifier, str(uuid.uuid4()), database.name, database.username, database.password)
assert len(settings.databases) == 0
def test_delete_database(database, server):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
DatabaseCli.delete(database.identifier)
assert len(settings.databases) == 0
def test_delete_database_with_queries(database, server, query):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
QueryCli.add(query.identifier, database.identifier, query.query)
assert len(settings.databases) == 1
assert len(settings.queries) == 1
DatabaseCli.delete(database.identifier)
assert len(settings.databases) == 0
assert len(settings.queries) == 0
def test_delete_database_not_existing(database, server):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
DatabaseCli.delete(database.identifier)
with pytest.raises(ValueError):
DatabaseCli.delete(database.identifier)
def test_update_database_not_existing(database, server):
with pytest.raises(ValueError):
DatabaseCli.update(database.identifier, server, 'new name', 'new username', 'new password')
def test_update_database_not_existing_server(database, server):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
with pytest.raises(ValueError):
DatabaseCli.update(database.identifier, str(uuid.uuid4()), 'new name', 'new username', 'new password')
def test_rename_database(database, server):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
new_identifier = str(uuid.uuid4())
DatabaseCli.rename(database.identifier, new_identifier)
assert len(settings.databases) == 1
renamed_db = settings.databases[new_identifier]
database.identifier = new_identifier
assert compare_databases(database, renamed_db)
def test_rename_database_with_queries(database, server, query):
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
QueryCli.add(query.identifier, database.identifier, query.query)
new_identifier = str(uuid.uuid4())
DatabaseCli.rename(database.identifier, new_identifier)
assert len(settings.databases) == 1
assert len(settings.queries) == 1
assert settings.queries[query.identifier].database.identifier == new_identifier
def test_rename_database_not_existing(database):
new_identifier = str(uuid.uuid4())
with pytest.raises(ValueError):
DatabaseCli.rename(database.identifier, new_identifier)
def test_rename_database_duplicated(database, server):
other_identifier = str(uuid.uuid4())
other_db = Database(other_identifier, server, database.name, database.username, database.password)
DatabaseCli.add(database.identifier, server.identifier, database.name, database.username, database.password)
DatabaseCli.add(other_db.identifier, server.identifier, other_db.name, other_db.username, other_db.password)
assert len(settings.databases) == 2
with pytest.raises(ValueError):
DatabaseCli.rename(database.identifier, other_identifier)
assert compare_databases(database, settings.databases[database.identifier])
assert compare_databases(other_db, settings.databases[other_identifier])
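# These tests rely on pytest fixtures named server, database and query, normally defined
# in a conftest.py that is not shown here. A minimal sketch of plausible fixtures, based
# only on how they are used above; the Server and Query constructor signatures are
# assumptions for illustration, not dbtrigger's documented API:
import pytest
from dbtrigger.models import Database, Query, Server

@pytest.fixture
def server():
    return Server('test-server', 'localhost', 'postgresql')

@pytest.fixture
def database(server):
    return Database('test-db', server, 'mydb', 'user', 'secret')

@pytest.fixture
def query(database):
    return Query('test-query', database, 'SELECT 1')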
| 38.428571
| 116
| 0.772256
| 593
| 5,111
| 6.53457
| 0.10118
| 0.130065
| 0.067097
| 0.093935
| 0.761032
| 0.677419
| 0.66271
| 0.66271
| 0.507871
| 0.477935
| 0
| 0.007154
| 0.124829
| 5,111
| 132
| 117
| 38.719697
| 0.859155
| 0.004109
| 0
| 0.511364
| 0
| 0
| 0.012579
| 0
| 0
| 0
| 0
| 0
| 0.261364
| 1
| 0.181818
| false
| 0.193182
| 0.056818
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
a04797fdee39fb437fee089feeeacafda35a132b
| 2,064
|
py
|
Python
|
experiments/cntJson.py
|
MadhuNimmo/jalangi2
|
bbe8350b8ede5d978c1b3923780f277aacb1d074
|
[
"Apache-2.0"
] | null | null | null |
experiments/cntJson.py
|
MadhuNimmo/jalangi2
|
bbe8350b8ede5d978c1b3923780f277aacb1d074
|
[
"Apache-2.0"
] | null | null | null |
experiments/cntJson.py
|
MadhuNimmo/jalangi2
|
bbe8350b8ede5d978c1b3923780f277aacb1d074
|
[
"Apache-2.0"
] | null | null | null |
import json
import sys
#filename='/home/anon/js-acg-examples-master/Knockout_test_results/StatWala.json'
cnt=0
cnt2=0
#item_dict = json.loads(filename)
#import json
filename1 = sys.argv[1]
#filename2 = sys.argv[2]
#out_key = filename2.read().split('\n')
'''out_key = [line.rstrip('\n') for line in open(filename2)]'''
with open(filename1) as f1:
data = json.load(f1)
#print(len(data))
listy = []
'''with open(filename2) as f2:
data2 = json.load(f2)'''
'''for out in out_key:'''
'''for key,value in data.items():
if ("app.js") in key or ("base.js") in key:
#print(key)
for k,v in value.items():
cnt+=len(v)'''
for key,value in data.items():
for k,v in value.items():
cnt+=len(v)
'''for key, value in data.items():
cnt+=len(value)
for item in value:
if(item == "Var(/Users/UserXYZ/Documents/todomvc-master/examples/angularjs/node_modules/angular/angular.js@1633:48390-48494, %ssa_val 16)"):
listy.append(key)
#print(key.find("jquery.js")>-1 & item.find("jquery.js")>-1)
#if((key.find("app.js")>-1 & item.find("base.js")>-1)==True):
#cnt2+=1
listy2=[]
for key, value in data.items():
if(key=="Var(/Users/UserXYZ/Documents/todomvc-master/examples/angularjs/node_modules/angular/angular.js@7750:270474-289480, [childTranscludeFn])"):
for item in value:
if(item in set(listy)):
listy2.append(item)'''
print(cnt)
'''print(cnt2)
print(len(listy))
print(len(listy2))'''
'''for key1,value1 in data1.items():
for key2,value2 in data2.items():
if(key1==key2 or key1 in key2 ):
for k1,v1 in value1.items():
for k2,v2 in value2.items():
if(v1!=v2):
print(key1,value1)'''
#if two json obs are same
'''a, b = json.dumps(data1, sort_keys=True), json.dumps(data2, sort_keys=True)
print(a == b)'''
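# The only live logic above is the double loop that sums len(v) over a two-level JSON
# object (e.g. a call graph of caller -> {site: [callees]}). A tiny worked example of
# that counting on inline data (illustrative, not from the original script):
sample = {
    "a.js": {"f": [1, 2, 3], "g": [4]},
    "b.js": {"h": [5, 6]},
}
total = 0
for key, value in sample.items():
    for k, v in value.items():
        total += len(v)
print(total)  # 3 + 1 + 2 = 6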
| 34.983051
| 157
| 0.562984
| 290
| 2,064
| 3.968966
| 0.331034
| 0.020851
| 0.038228
| 0.045178
| 0.297133
| 0.297133
| 0.262381
| 0.220678
| 0.220678
| 0.220678
| 0
| 0.052666
| 0.26405
| 2,064
| 59
| 158
| 34.983051
| 0.705069
| 0.108527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033898
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
a04c74859e147b481962d7389a2144e8a4b8236e
| 1,151
|
py
|
Python
|
tests/integration/test_issue_1447.py
|
alexey-tereshenkov-oxb/pex
|
2e2d1e50e604fdee48b0d51aea482ca255521ff0
|
[
"Apache-2.0"
] | 2,160
|
2015-01-06T17:57:39.000Z
|
2022-03-30T19:59:01.000Z
|
tests/integration/test_issue_1447.py
|
alexey-tereshenkov-oxb/pex
|
2e2d1e50e604fdee48b0d51aea482ca255521ff0
|
[
"Apache-2.0"
] | 1,242
|
2015-01-22T14:56:46.000Z
|
2022-03-31T18:02:38.000Z
|
tests/integration/test_issue_1447.py
|
Satertek/pex
|
64de1c4cf031118ef446ac98a8c164c91c23bb9b
|
[
"Apache-2.0"
] | 248
|
2015-01-15T13:34:50.000Z
|
2022-03-26T01:24:18.000Z
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import shutil
import subprocess
import sys
from pex.pex_info import PexInfo
from pex.testing import run_pex_command
from pex.typing import TYPE_CHECKING
from pex.variables import unzip_dir
if TYPE_CHECKING:
from typing import Any
def test_layout_identification(tmpdir):
# type: (Any) -> None
pex_root = os.path.join(str(tmpdir), "pex_root")
pex_file = os.path.join(str(tmpdir), "a.pex")
run_pex_command(
args=["-o", pex_file, "--pex-root", pex_root, "--runtime-pex-root", pex_root]
).assert_success()
pex_hash = PexInfo.from_pex(pex_file).pex_hash
assert pex_hash is not None
expected_unzip_dir = unzip_dir(pex_root, pex_hash)
assert not os.path.exists(expected_unzip_dir)
subprocess.check_call(args=[pex_file, "-c", ""])
assert os.path.isdir(expected_unzip_dir)
shutil.rmtree(expected_unzip_dir)
os.chmod(pex_file, 0o644)
subprocess.check_call(args=[sys.executable, pex_file, "-c", ""])
assert os.path.isdir(expected_unzip_dir)
| 29.512821
| 85
| 0.730669
| 174
| 1,151
| 4.603448
| 0.373563
| 0.069913
| 0.099875
| 0.032459
| 0.149813
| 0.102372
| 0.102372
| 0.102372
| 0.102372
| 0.102372
| 0
| 0.010309
| 0.157255
| 1,151
| 38
| 86
| 30.289474
| 0.815464
| 0.126846
| 0
| 0.076923
| 0
| 0
| 0.046953
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 1
| 0.038462
| false
| 0
| 0.346154
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
a060c7c4400126644e1d48eb927d2de5fe556729
| 4,915
|
py
|
Python
|
models/deeplab_v2.py
|
iamsofancyyoualreadyknow/IHC-based-labels-generation-and-semantic-segmentation-for-lung-cancer
|
57904544c6d6b43dcd5937afeb474c0a47456d98
|
[
"MIT"
] | null | null | null |
models/deeplab_v2.py
|
iamsofancyyoualreadyknow/IHC-based-labels-generation-and-semantic-segmentation-for-lung-cancer
|
57904544c6d6b43dcd5937afeb474c0a47456d98
|
[
"MIT"
] | null | null | null |
models/deeplab_v2.py
|
iamsofancyyoualreadyknow/IHC-based-labels-generation-and-semantic-segmentation-for-lung-cancer
|
57904544c6d6b43dcd5937afeb474c0a47456d98
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
# import slim
# conv layers
layers = tf.contrib.layers
arg_scope = tf.contrib.framework.arg_scope
def vgg_conv_dilation(inputs, weight_decay=0.0005):
with arg_scope([layers.convolution2d, layers.max_pool2d], padding='SAME'):
with arg_scope([layers.convolution2d], rate=1,
weights_initializer=tf.truncated_normal_initializer(stddev=0.01),
weights_regularizer=layers.l2_regularizer(weight_decay)):
net = layers.convolution2d(inputs, 64, [3, 3], scope='vgg_16/conv1/conv1_1' )
net = layers.convolution2d(net, 64, [3, 3], scope='vgg_16/conv1/conv1_2' )
net = layers.max_pool2d(net, [3, 3], stride=[2,2], scope='vgg_16/pool1')
net = layers.convolution2d(net, 128, [3, 3], scope='vgg_16/conv2/conv2_1' )
net = layers.convolution2d(net, 128, [3, 3], scope='vgg_16/conv2/conv2_2' )
net = layers.max_pool2d(net, [3, 3], stride=[2,2], scope='vgg_16/pool2')
net = layers.convolution2d(net, 256, [3, 3], scope='vgg_16/conv3/conv3_1' )
net = layers.convolution2d(net, 256, [3, 3], scope='vgg_16/conv3/conv3_2' )
net = layers.convolution2d(net, 256, [3, 3], scope='vgg_16/conv3/conv3_3' )
net = layers.max_pool2d(net, [3, 3], stride=[2,2], scope='vgg_16/pool3')
net = layers.convolution2d(net, 512, [3, 3], scope='vgg_16/conv4/conv4_1' )
net = layers.convolution2d(net, 512, [3, 3], scope='vgg_16/conv4/conv4_2' )
net = layers.convolution2d(net, 512, [3, 3], scope='vgg_16/conv4/conv4_3' )
net = layers.max_pool2d(net, [3, 3], stride=[1,1], scope='vgg_16/pool4')
net = layers.convolution2d(net, 512, [3, 3], rate=2, scope='vgg_16/conv5/conv5_1' )
net = layers.convolution2d(net, 512, [3, 3], rate=2, scope='vgg_16/conv5/conv5_2' )
net = layers.convolution2d(net, 512, [3, 3], rate=2, scope='vgg_16/conv5/conv5_3' )
return net
def deeplab_top(inputs, num_classes=34, dropout=False, weight_decay=0.0005):
with arg_scope([layers.convolution2d, layers.max_pool2d], padding='SAME'):
with arg_scope([layers.convolution2d], rate=1,
weights_initializer=tf.truncated_normal_initializer(stddev=0.01),
weights_regularizer=layers.l2_regularizer(weight_decay),
biases_initializer=tf.constant_initializer(value=0, dtype=tf.float32),
biases_regularizer=layers.l2_regularizer(weight_decay)):
with arg_scope([layers.dropout], keep_prob = 0.5, is_training=dropout):
pool5 = layers.max_pool2d(inputs, [3, 3], scope='vgg_16/pool5')
#fc61: dilation = 6
net = layers.convolution2d(pool5, 1024, [3, 3], rate=6, scope='fc6_1')
net = layers.dropout(net, scope='drop6_1')
#fc71: dilation = 1
net = layers.convolution2d(net, 1024, [1, 1], scope='fc7_1')
net = layers.dropout(net, scope='drop7_1')
#fc81:
fc8_1 = layers.convolution2d(net, num_classes, [1, 1], scope='fc8_1')
#fc62: dilation = 12
net = layers.convolution2d(pool5, 1024, [3, 3], rate=12, scope='fc6_2')
net = layers.dropout(net, scope='drop6_2')
#fc72: dilation = 1
net = layers.convolution2d(net, 1024, [1, 1], scope='fc7_2')
net = layers.dropout(net, scope='drop7_2')
#fc82
fc8_2 = layers.convolution2d(net, num_classes, [1, 1], scope='fc8_2')
#fc63: dilation = 18
net = layers.convolution2d(pool5, 1024, [3, 3], rate=18, scope='fc6_3')
net = layers.dropout(net, scope='drop6_3')
#fc73: dilation = 1
net = layers.convolution2d(net, 1024, [1, 1], scope='fc7_3')
net = layers.dropout(net, scope='drop7_3')
#fc83:
fc8_3 = layers.convolution2d(net, num_classes, [1, 1], scope='fc8_3')
#fc64: dilation = 24
net = layers.convolution2d(pool5, 1024, [3, 3], rate=24, scope='fc6_4')
net = layers.dropout(net, scope='drop6_4')
#fc74: dilation = 1
net = layers.convolution2d(net, 1024, [1, 1], scope='fc7_4')
net = layers.dropout(net, scope='drop7_4')
#fc84:
fc8_4 = layers.convolution2d(net, num_classes, [1, 1], scope='fc8_4')
net = tf.add_n([fc8_1, fc8_2, fc8_3, fc8_4])
return net
def deeplab_v2(inputs, num_classes=34, dropout=False, weight_decay=0.0005):
feature = vgg_conv_dilation(inputs)
seg = deeplab_top(feature, num_classes=num_classes, dropout=dropout, weight_decay=weight_decay)
return seg
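# A sketch of how the network above would typically be instantiated under TensorFlow 1.x
# (the input resolution and class count below are illustrative placeholders, not values
# taken from the repository):
inputs = tf.placeholder(tf.float32, shape=[None, 321, 321, 3], name='images')
logits = deeplab_v2(inputs, num_classes=34, dropout=False)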
| 56.494253
| 99
| 0.585554
| 648
| 4,915
| 4.266975
| 0.154321
| 0.107414
| 0.167089
| 0.144665
| 0.760579
| 0.74575
| 0.644123
| 0.644123
| 0.57179
| 0.499819
| 0
| 0.110706
| 0.274059
| 4,915
| 87
| 100
| 56.494253
| 0.664238
| 0.038454
| 0
| 0.131148
| 0
| 0
| 0.094228
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04918
| false
| 0
| 0.016393
| 0
| 0.114754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
a068b901b478d011dc44a977f7e4cc0f17632eaf
| 11,386
|
py
|
Python
|
visualize_high_LOO/visualize_high_LOO_cifar_norb.py
|
mkuchnik/Efficient_Augmentation
|
a82190c02509682c34f2df782fb58f8ffd3b11da
|
[
"MIT"
] | 11
|
2019-05-09T22:43:29.000Z
|
2021-01-13T22:26:48.000Z
|
visualize_high_LOO/visualize_high_LOO_cifar_norb.py
|
mkuchnik/Efficient_Augmentation
|
a82190c02509682c34f2df782fb58f8ffd3b11da
|
[
"MIT"
] | 1
|
2020-10-07T14:03:47.000Z
|
2020-10-07T14:03:47.000Z
|
visualize_high_LOO/visualize_high_LOO_cifar_norb.py
|
mkuchnik/Efficient_Augmentation
|
a82190c02509682c34f2df782fb58f8ffd3b11da
|
[
"MIT"
] | 6
|
2019-03-05T02:26:01.000Z
|
2021-05-11T14:35:41.000Z
|
import pprint
import time
import keras
import numpy as np
import joblib
import dataset_loaders
import selection_policy
import augmentations
import experiments
import experiments_util
import featurized_classifiers
import visualization_util
import matplotlib.pyplot as plt
mem = joblib.Memory(cachedir="./cache", verbose=1)
def run_test(classes,
rounds,
n_aug_sample_points,
n_train,
n_jobs,
cv,
use_GPU,
batch_size,
dataset,
aug_transformation,
aug_kw_args,
logistic_reg__C,
CNN_extractor_max_iter,
use_loss,
experiment_configs,
results_filename,
):
run_params = {
"classes": classes,
"rounds": rounds,
"n_aug_sample_points": n_aug_sample_points,
"n_train": n_train,
"n_jobs": n_jobs,
"cv": cv,
"use_GPU": use_GPU,
"batch_size": batch_size,
"dataset": dataset.name,
"aug_transformation": aug_transformation.name,
"aug_kw_args": aug_kw_args,
"logistic_reg__C": logistic_reg__C,
"CNN_extractor_max_iter": CNN_extractor_max_iter,
"use_loss": use_loss,
"experiment_configs": experiment_configs,
"results_filename": results_filename,
}
pprint.pprint(run_params)
assert n_aug_sample_points
(x_train, y_train), (x_test, y_test) = experiments_util.prepare_dataset(
dataset,
classes,
n_train,
)
print("Train class breakdown: {}".format(
np.unique(y_train, return_counts=True))
)
print("Test class breakdown: {}".format(
np.unique(y_test, return_counts=True))
)
aug_f = augmentations.get_transformation(aug_transformation)
(orig_and_auged_x_train,
orig_and_auged_y_train,
orig_and_auged_idxs_train) = \
experiments_util.poison_dataset(x_train,
y_train,
aug_f,
aug_kw_args)
(orig_and_auged_x_test,
orig_and_auged_y_test,
orig_and_auged_idxs_test) = \
experiments_util.poison_dataset(x_test,
y_test,
aug_f,
aug_kw_args)
print("x_train", x_train.shape)
print("orig_and_auged_x_train", orig_and_auged_x_train.shape)
feature_clf = featurized_classifiers.build_featurized_ResNet_feature_clf(
CNN_extractor_max_iter,
use_GPU,
batch_size)
@mem.cache
def transform_features(x, y):
return feature_clf.fit_transform(x, y=y)
featurized_x_train = transform_features(x=x_train, y=y_train)
featurized_y_train = y_train
featurized_x_test = transform_features(x=x_test, y=y_test)
featurized_y_test = y_test
orig_and_auged_featurized_x_train = transform_features(x=orig_and_auged_x_train,
y=orig_and_auged_y_train)
orig_and_auged_featurized_y_train = orig_and_auged_y_train
orig_and_auged_featurized_x_train_to_source_idxs = orig_and_auged_idxs_train
orig_and_auged_featurized_x_test = transform_features(x=orig_and_auged_x_test,
y=orig_and_auged_y_test)
orig_and_auged_featurized_y_test = orig_and_auged_y_test
orig_and_auged_featurized_x_test_to_source_idxs = orig_and_auged_idxs_test
clf = featurized_classifiers.build_logistic_reg_clf(
logistic_reg__C,
cv,
)
(no_aug_no_poison_acc,
poisoned_acc,
all_aug_train_poisoned_acc,
aug_scores,
after_aug_scores,
best_params,
training_total_time) = experiments_util.train_and_score_clf(
clf,
featurized_x_train,
y_train,
featurized_x_test,
y_test,
orig_and_auged_featurized_x_train,
orig_and_auged_featurized_y_train,
orig_and_auged_featurized_x_test,
orig_and_auged_featurized_y_test,
use_loss,
cv,
)
training_end_time = time.time()
img_ranks = np.argsort(np.abs(aug_scores))
top_n = 100
good_imgs = x_train[img_ranks][-top_n:]
bad_imgs = x_train[img_ranks][:top_n]
print("scores", aug_scores)
print("scores", aug_scores.shape)
print("ranks", img_ranks)
print("ranks", img_ranks.shape)
print("good", good_imgs.shape)
print("bad", bad_imgs.shape)
figures = {"{}".format(i): img for i, img in enumerate(good_imgs)}
assert len(figures) == top_n
visualization_util.plot_figures(figures, nrows=10, ncols=10)
plt.savefig("good_images.pdf", bbox_inches="tight", pad_inches=0)
plt.show()
figures = {"{}".format(i): img for i, img in enumerate(bad_imgs)}
assert len(figures) == top_n
visualization_util.plot_figures(figures, nrows=10, ncols=10)
plt.savefig("bad_images.pdf", bbox_inches="tight", pad_inches=0)
plt.show()
def main():
rounds = 5
#rounds = 3
n_aug_sample_points = [1, 10, 50, 100, 250, 500, 750, 1000]
n_train = 1000
n_jobs = 1
cv = 1
use_GPU = True
batch_size = 128
CNN_extractor_max_iter = 40
# use_loss = False
use_loss = True
# Can use multiple values of C for cross-validation
logistic_reg__Cs = [[10], [100], [1000]]
classes_datasets = [
# ((0, 1), dataset_loaders.Dataset.NORB),
((0, 1), dataset_loaders.Dataset.CIFAR10),
]
selected_augmentations = [
#(augmentations.Image_Transformation.translate, {"mag_aug": 6}),
(augmentations.Image_Transformation.translate, {"mag_aug": 3}),
#(augmentations.Image_Transformation.rotate, {"mag_aug": 5,
# "n_rotations": 4}),
#(augmentations.Image_Transformation.crop, {"mag_augs": [2]}),
]
experiment_configs = [
("baseline", False, False),
("random_proportional", False, False),
("random_proportional", False, True),
("random_proportional", True, False),
("random_proportional", True, True),
("random_inverse_proportional", False, False),
#("random_inverse_proportional", True, False),
#("random_softmax_proportional", False, False),
#("random_softmax_proportional", False, True),
#("random_softmax_proportional", True, False),
#("random_softmax_proportional", True, True),
#("random_inverse_softmax_proportional", False, False),
#("random_inverse_softmax_proportional", True, False),
("deterministic_proportional", False, False),
("deterministic_proportional", False, True),
("deterministic_proportional", True, False),
("deterministic_proportional", True, True),
("deterministic_inverse_proportional", False, False),
("deterministic_inverse_proportional", True, False),
]
for logistic_reg__C in logistic_reg__Cs:
for classes, dataset in classes_datasets:
for aug_transformation, aug_kw_args in selected_augmentations:
dataset_class_str = experiments_util.classes_to_class_str(classes)
print("Class types: {}".format(dataset_class_str))
reg_str = "-".join(list(map(str, logistic_reg__C)))
results_filename = "aug_results_{}_{}_{}_{}{}".format(
dataset.name,
dataset_class_str,
aug_transformation.name,
reg_str,
"_loss" if use_loss else "",
)
run_test(classes,
rounds,
n_aug_sample_points,
n_train,
n_jobs,
cv,
use_GPU,
batch_size,
dataset,
aug_transformation,
aug_kw_args,
logistic_reg__C,
CNN_extractor_max_iter,
use_loss,
experiment_configs,
results_filename,
)
use_loss = False
# Can use multiple values of C for cross-validation
logistic_reg__Cs = [[10], [100], [1000]]
classes_datasets = [
# ((0, 1), dataset_loaders.Dataset.NORB),
((0, 1), dataset_loaders.Dataset.CIFAR10),
]
selected_augmentations = [
#(augmentations.Image_Transformation.translate, {"mag_aug": 6}),
#(augmentations.Image_Transformation.rotate, {"mag_aug": 5,
# "n_rotations": 4}),
#(augmentations.Image_Transformation.crop, {"mag_augs": [2]}),
(augmentations.Image_Transformation.compose, {"n_aug": 10}),
]
experiment_configs = [
("baseline", False, False),
("random_proportional", False, False),
("random_proportional", False, True),
("random_proportional", True, False),
("random_proportional", True, True),
("random_inverse_proportional", False, False),
#("random_inverse_proportional", True, False),
#("random_softmax_proportional", False, False),
#("random_softmax_proportional", False, True),
#("random_softmax_proportional", True, False),
#("random_softmax_proportional", True, True),
#("random_inverse_softmax_proportional", False, False),
#("random_inverse_softmax_proportional", True, False),
("deterministic_proportional", False, False),
("deterministic_proportional", False, True),
("deterministic_proportional", True, False),
("deterministic_proportional", True, True),
("deterministic_inverse_proportional", False, False),
("deterministic_inverse_proportional", True, False),
]
for logistic_reg__C in logistic_reg__Cs:
for classes, dataset in classes_datasets:
for aug_transformation, aug_kw_args in selected_augmentations:
dataset_class_str = experiments_util.classes_to_class_str(classes)
print("Class types: {}".format(dataset_class_str))
reg_str = "-".join(list(map(str, logistic_reg__C)))
results_filename = "aug_results_{}_{}_{}_{}{}".format(
dataset.name,
dataset_class_str,
aug_transformation.name,
reg_str,
"_loss" if use_loss else "",
)
run_test(classes,
rounds,
n_aug_sample_points,
n_train,
n_jobs,
cv,
use_GPU,
batch_size,
dataset,
aug_transformation,
aug_kw_args,
logistic_reg__C,
CNN_extractor_max_iter,
use_loss,
experiment_configs,
results_filename,
)
if __name__ == "__main__":
main()
| 36.031646
| 84
| 0.584138
| 1,211
| 11,386
| 5.045417
| 0.143683
| 0.029787
| 0.051064
| 0.036007
| 0.741244
| 0.715548
| 0.674468
| 0.626514
| 0.619476
| 0.565466
| 0
| 0.011546
| 0.323028
| 11,386
| 315
| 85
| 36.146032
| 0.781136
| 0.120938
| 0
| 0.494163
| 0
| 0
| 0.101593
| 0.049294
| 0
| 0
| 0
| 0
| 0.011673
| 1
| 0.011673
| false
| 0
| 0.050584
| 0.003891
| 0.066148
| 0.054475
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
a08113f70d1b07cff7761da1bc92b7750832a572
| 1,508
|
py
|
Python
|
q2_fondue/get_all.py
|
misialq/q2-fondue
|
a7a541ee017381b34d38ef766de39d5d62588465
|
[
"BSD-3-Clause"
] | 10
|
2022-03-21T16:07:22.000Z
|
2022-03-31T09:33:48.000Z
|
q2_fondue/get_all.py
|
misialq/q2-fondue
|
a7a541ee017381b34d38ef766de39d5d62588465
|
[
"BSD-3-Clause"
] | null | null | null |
q2_fondue/get_all.py
|
misialq/q2-fondue
|
a7a541ee017381b34d38ef766de39d5d62588465
|
[
"BSD-3-Clause"
] | 4
|
2022-03-21T06:51:44.000Z
|
2022-03-29T15:56:14.000Z
|
# ----------------------------------------------------------------------------
# Copyright (c) 2022, Bokulich Laboratories.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import qiime2 as q2
import pandas as pd
import threading
from q2_fondue.utils import handle_threaded_exception
from qiime2 import Artifact
threading.excepthook = handle_threaded_exception
def get_all(ctx, accession_ids, email, n_jobs=1, retries=2, log_level='INFO'):
# get required methods
get_metadata = ctx.get_action('fondue', 'get_metadata')
get_sequences = ctx.get_action('fondue', 'get_sequences')
# fetch metadata
metadata, failed_ids = get_metadata(
accession_ids, email, n_jobs, log_level
)
failed_ids_df = failed_ids.view(pd.DataFrame)
# fetch sequences - use metadata to get run ids, regardless of whether
# runs or projects were requested
run_ids = q2.Artifact.import_data(
'NCBIAccessionIDs', pd.Series(metadata.view(pd.DataFrame).index)
)
seq_single, seq_paired, failed_ids = get_sequences(
run_ids, email, retries, n_jobs, log_level
)
failed_ids_df = failed_ids_df.append(failed_ids.view(pd.DataFrame))
if failed_ids_df.shape[0] > 0:
failed_ids = Artifact.import_data('SRAFailedIDs', failed_ids_df)
return metadata, seq_single, seq_paired, failed_ids
| 32.085106
| 78
| 0.657825
| 190
| 1,508
| 4.978947
| 0.431579
| 0.104651
| 0.05814
| 0.038055
| 0.2537
| 0.12685
| 0.069767
| 0.069767
| 0.069767
| 0
| 0
| 0.010326
| 0.165119
| 1,508
| 46
| 79
| 32.782609
| 0.741064
| 0.301724
| 0
| 0
| 0
| 0
| 0.066282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.304348
| 0
| 0.391304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
a0a2f155643acffd9a5b5d44e3b912311ab75ced
| 2,084
|
py
|
Python
|
runai/mp/keras/layers/core.py
|
bamps53/runai
|
0c868160f64e1e063c6eb6f660d42917322d40c5
|
[
"MIT"
] | 86
|
2020-01-23T18:56:41.000Z
|
2022-02-14T22:32:08.000Z
|
runai/mp/keras/layers/core.py
|
bamps53/runai
|
0c868160f64e1e063c6eb6f660d42917322d40c5
|
[
"MIT"
] | 18
|
2020-01-24T17:55:18.000Z
|
2021-12-01T01:01:32.000Z
|
runai/mp/keras/layers/core.py
|
bamps53/runai
|
0c868160f64e1e063c6eb6f660d42917322d40c5
|
[
"MIT"
] | 12
|
2020-02-03T14:30:44.000Z
|
2022-01-08T16:06:59.000Z
|
import keras.backend as K
import keras.layers
import runai.mp
from .keep import Keep
from .parallelised import Parallelised
Activation = Keep.create('Activation')
class Dense(Parallelised, keras.layers.Dense):
def build(self, input_shape):
assert len(input_shape) == 2 # TODO(levosos): support more than two dimensions
total_cin = input_shape[-1]
cin, cout, c = self.calculate_cs(
cin=total_cin,
cout=self.units)
self.kernels = self.add_weights(
name='kernel',
shape=(cin, cout),
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint) # TODO(levosos): is this ok?
if self.use_bias:
self.biases = self.add_weights(
name='bias',
shape=(c,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint) # TODO(levosos): is this ok?
self.input_spec = keras.layers.InputSpec(ndim=2, axes={-1: total_cin}) # TODO(levosos): use 'min_ndim' once supporting more than two dimensions
self.built = True
def call(self, inputs):
inputs = self.inputs(inputs, channel_axis=-1)
outputs = self.parallelise(
lambda input, kernel: K.dot(input, kernel),
inputs,
self.kernels)
if runai.mp.method == runai.mp.Method.Cin:
outputs = self.reduce_split(outputs, channel_axis=-1)
if self.use_bias:
outputs = self.parallelise(
lambda output, bias: K.bias_add(output, bias, data_format='channels_last'),
outputs,
self.biases)
if self.activation is not None:
outputs = self.parallelise(
lambda output: self.activation(output),
outputs)
return self.merge(outputs, channel_axis=-1)
Dropout = Keep.create('Dropout')
Flatten = Keep.create('Flatten')
| 31.575758
| 151
| 0.600768
| 238
| 2,084
| 5.151261
| 0.352941
| 0.044861
| 0.029364
| 0.068516
| 0.102773
| 0.047308
| 0
| 0
| 0
| 0
| 0
| 0.004798
| 0.299904
| 2,084
| 65
| 152
| 32.061538
| 0.835504
| 0.082534
| 0
| 0.104167
| 0
| 0
| 0.024646
| 0
| 0
| 0
| 0
| 0.015385
| 0.020833
| 1
| 0.041667
| false
| 0
| 0.104167
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
39f8dcdaeba92c1fff96ab2beb0ef7065bdd2f6c
| 1,770
|
py
|
Python
|
stakingsvc/walletgui/views/paymentmethodview.py
|
biz2013/xwjy
|
8f4b5e3e3fc964796134052ff34d58d31ed41904
|
[
"Apache-2.0"
] | 1
|
2019-12-15T16:56:44.000Z
|
2019-12-15T16:56:44.000Z
|
stakingsvc/walletgui/views/paymentmethodview.py
|
biz2013/xwjy
|
8f4b5e3e3fc964796134052ff34d58d31ed41904
|
[
"Apache-2.0"
] | 87
|
2018-01-06T10:18:31.000Z
|
2022-03-11T23:32:30.000Z
|
stakingsvc/walletgui/views/paymentmethodview.py
|
biz2013/xwjy
|
8f4b5e3e3fc964796134052ff34d58d31ed41904
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging, sys
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from walletgui.controller.global_constants import *
from walletgui.controller.crypto_utils import CryptoUtility
from walletgui.controller.walletmanager import WalletManager
from walletgui.controller.paymentmethodmanager import PaymentMethodManager
from walletgui.views import errorpageview
from walletgui.views.models.useraccountinfo import *
logger = logging.getLogger("site.dashboard")
@login_required
def create(request):
try:
return render(request, 'walletgui/balance.html',
{'account': None, 'userpaymentmethod': None })
except Exception as e:
error_msg = '用户主页显示遇到错误: {0}'.format(sys.exc_info()[0])
logger.exception(error_msg)
return errorpageview.show_error(request, ERR_CRITICAL_IRRECOVERABLE,
'系统遇到问题,请稍后再试。。。{0}'.format(error_msg))
def edit(request):
try:
return render(request, 'walletgui/balance.html',
{'account': None, 'userpaymentmethod': None })
except Exception as e:
error_msg = '用户主页显示遇到错误: {0}'.format(sys.exc_info()[0])
logger.exception(error_msg)
return errorpageview.show_error(request, ERR_CRITICAL_IRRECOVERABLE,
'系统遇到问题,请稍后再试。。。{0}'.format(error_msg))
def delete(request):
try:
return render(request, 'walletgui/balance.html',
{'account': None, 'userpaymentmethod': None })
except Exception as e:
error_msg = '用户主页显示遇到错误: {0}'.format(sys.exc_info()[0])
logger.exception(error_msg)
return errorpageview.show_error(request, ERR_CRITICAL_IRRECOVERABLE,
'系统遇到问题,请稍后再试。。。{0}'.format(error_msg))
| 37.659574
| 76
| 0.70565
| 200
| 1,770
| 6.12
| 0.32
| 0.058824
| 0.075163
| 0.053922
| 0.60049
| 0.60049
| 0.60049
| 0.60049
| 0.60049
| 0.60049
| 0
| 0.006873
| 0.177966
| 1,770
| 46
| 77
| 38.478261
| 0.834364
| 0.021469
| 0
| 0.631579
| 0
| 0
| 0.145087
| 0.03815
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.236842
| 0
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
260b023afea60f62495ec2404352213dae65708e
| 1,160
|
py
|
Python
|
PageMonitor/project/lib/config.py
|
DanylZhang/IdeaWorkspace
|
726be80db4ca7dac4104ebaa22b795f37aca73e0
|
[
"MIT"
] | null | null | null |
PageMonitor/project/lib/config.py
|
DanylZhang/IdeaWorkspace
|
726be80db4ca7dac4104ebaa22b795f37aca73e0
|
[
"MIT"
] | null | null | null |
PageMonitor/project/lib/config.py
|
DanylZhang/IdeaWorkspace
|
726be80db4ca7dac4104ebaa22b795f37aca73e0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding:utf-8
import pymysql
default_config = {
'host': '139.196.96.149',
'port': 13306,
'user': 'dataway-rw',
'password': 'QqHVMhmN*8',
'db': 'jumei',
'charset': 'utf8mb4'
}
apollo_config = {
'host': '127.0.0.1',
'port': 11306,
'user': 'apollo-rw',
'password': 'QBT094bt',
'db': 'apollo',
'charset': 'utf8mb4',
'autocommit': True
}
allsite_config = {
'host': '127.0.0.1',
'port': 15306,
'user': 'apollo-rw',
'password': 'QBT094bt',
'db': 'all_site',
'charset': 'utf8mb4'
}
dataway_config = {
'host': '139.196.96.149',
'port': 13306,
'user': 'dataway-rw',
'password': 'QqHVMhmN*8',
'db': 'jumei',
'charset': 'utf8mb4'
}
dw_entity_config = {
'host': '127.0.0.1',
'port': 18306,
'user': 'qbt',
'password': 'QBT094bt',
'db': 'dw_entity',
'charset': 'utf8mb4',
'autocommit': True
}
channel_config = {
'host': 'channel.ecdataway.com',
'port': 3306,
'user': 'comment_catcher',
'password': 'cc33770880',
'db': 'monitor',
'charset': 'utf8mb4',
'cursorclass': pymysql.cursors.DictCursor
}
| 20.714286
| 45
| 0.546552
| 128
| 1,160
| 4.875
| 0.421875
| 0.096154
| 0.0625
| 0.067308
| 0.451923
| 0.451923
| 0.355769
| 0.259615
| 0.259615
| 0.259615
| 0
| 0.112849
| 0.228448
| 1,160
| 55
| 46
| 21.090909
| 0.584358
| 0.028448
| 0
| 0.5
| 0
| 0
| 0.420444
| 0.018667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.115385
| 0.019231
| 0
| 0.019231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
260f70e32d8dcbedd3be143cdb372b3adae4699c
| 1,356
|
py
|
Python
|
python/code_challenges/hashtable/test_hashtable.py
|
u-will/data-structures-and-algorithms
|
7ef7b5a527fbcacef8cbfe7a01fc69990c7358c3
|
[
"MIT"
] | null | null | null |
python/code_challenges/hashtable/test_hashtable.py
|
u-will/data-structures-and-algorithms
|
7ef7b5a527fbcacef8cbfe7a01fc69990c7358c3
|
[
"MIT"
] | null | null | null |
python/code_challenges/hashtable/test_hashtable.py
|
u-will/data-structures-and-algorithms
|
7ef7b5a527fbcacef8cbfe7a01fc69990c7358c3
|
[
"MIT"
] | null | null | null |
from hashtable import Hashtable
def test_create():
hashtable = Hashtable()
assert hashtable
def test_predictable_hash():
hashtable = Hashtable()
initial = hashtable._hash('spam')
secondary = hashtable._hash('spam')
assert initial == secondary
def test_in_range_hash():
hashtable = Hashtable()
actual = hashtable._hash('spam')
assert 0 <= actual < hashtable._size
def test_same_hash():
hashtable = Hashtable()
initial = hashtable._hash('listen')
secondary = hashtable._hash('silent')
assert initial == secondary
def test_different_hash():
hashtable = Hashtable()
initial = hashtable._hash('glisten')
secondary = hashtable._hash('silent')
assert initial != secondary
def test_get_apple():
hashtable = Hashtable()
hashtable.add("apple", "Used for apple sauce")
actual = hashtable.get("apple")
expected = "Used for apple sauce"
assert actual == expected
def test_get_silent_and_listen():
hashtable = Hashtable()
hashtable.add('listen','to me')
hashtable.add('silent','so quiet')
assert hashtable.get('listen') == 'to me'
assert hashtable.get('silent') == 'so quiet'
def test_contains():
hashtable = Hashtable()
hashtable.add('hello', 'me')
actual = hashtable.contains('hello')
expect = True
assert actual == expect
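# The Hashtable implementation under test lives in hashtable.py and is not shown here.
# A minimal sketch that would satisfy the behaviour these tests exercise; the bucket /
# chaining details below are assumptions, only the observable behaviour comes from the tests:
class Hashtable:
    def __init__(self, size=1024):
        self._size = size
        self._buckets = [[] for _ in range(size)]

    def _hash(self, key):
        # Summing character codes is deterministic, stays in range, and makes
        # anagrams such as 'listen' / 'silent' hash to the same slot.
        return sum(ord(ch) for ch in key) % self._size

    def add(self, key, value):
        self._buckets[self._hash(key)].append((key, value))

    def get(self, key):
        for k, v in self._buckets[self._hash(key)]:
            if k == key:
                return v
        return None

    def contains(self, key):
        return any(k == key for k, v in self._buckets[self._hash(key)])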
| 22.983051
| 50
| 0.672566
| 151
| 1,356
| 5.874172
| 0.251656
| 0.223224
| 0.099211
| 0.098083
| 0.303269
| 0.270575
| 0.128523
| 0.128523
| 0.128523
| 0
| 0
| 0.000924
| 0.202065
| 1,356
| 58
| 51
| 23.37931
| 0.818854
| 0
| 0
| 0.292683
| 0
| 0
| 0.109882
| 0
| 0
| 0
| 0
| 0
| 0.219512
| 1
| 0.195122
| false
| 0
| 0.02439
| 0
| 0.219512
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26155a333ec0b9d8dbde784e29edc2aabd3bd42b
| 1,707
|
py
|
Python
|
tests/test_characters.py
|
uliang/BloodSword
|
fc7e173ce56989c48009ec86d834072f9f2e70ac
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_characters.py
|
uliang/BloodSword
|
fc7e173ce56989c48009ec86d834072f9f2e70ac
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_characters.py
|
uliang/BloodSword
|
fc7e173ce56989c48009ec86d834072f9f2e70ac
|
[
"BSD-2-Clause"
] | null | null | null |
import pytest
from bloodsword.characters.sage import Sage
from bloodsword.characters.warrior import Warrior
from bloodsword.descriptors.armour import Armour
@pytest.fixture
def warrior():
return Warrior("Warrior", rank=2)
@pytest.fixture
def sage():
return Sage("Sage", rank=2)
def test_character_initialization(warrior, sage):
assert str(
warrior) == "<Name:Warrior Fighting Prowess=8 Psychic Ability=6 Awareness=6 Endurance=12>"
assert str(
sage) == "<Name:Sage Fighting Prowess=7 Psychic Ability=7 Awareness=6 Endurance=10>"
def test_endurance_cannot_exceed_max(warrior):
warrior.endurance = 13
assert warrior.endurance == 12
def test_endurance_increments(warrior):
warrior.endurance = 10
warrior.endurance += 1
assert warrior.endurance == 11
def test_endurance_increments_beyond_max(warrior):
warrior.endurance = 10
warrior.endurance += 5
assert warrior.endurance == 12
def test_attribute_decrements(warrior, sage):
sage.endurance -= 1
assert sage.endurance == 9
warrior.endurance -= 1
assert warrior.endurance == 11
sage.psychic_ability -= 2
assert sage.psychic_ability == 5
def test_attribute_decrement_clips_at_zero(sage, warrior):
sage.psychic_ability -= 10
assert sage.psychic_ability == 0
warrior.endurance -= 20
assert warrior.endurance == 0
def test_that_character_can_only_carry_one_piece_of_armour(warrior):
initial_no_items_carried = len(warrior.items_carried)
new_armour = Armour('ringmail', damage_reduction=2)
warrior.armour = new_armour
assert warrior.armour.damage_reduction == 2
assert len(warrior.items_carried) == initial_no_items_carried
| 25.863636
| 98
| 0.741652
| 220
| 1,707
| 5.554545
| 0.295455
| 0.157119
| 0.090016
| 0.042553
| 0.171849
| 0.171849
| 0.067103
| 0
| 0
| 0
| 0
| 0.028934
| 0.169889
| 1,707
| 65
| 99
| 26.261538
| 0.833451
| 0
| 0
| 0.227273
| 0
| 0
| 0.098418
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.204545
| false
| 0
| 0.090909
| 0.045455
| 0.340909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
2616d68573a5381dc443c6d189b9ad8fa29013e9
| 201
|
py
|
Python
|
examples/nod.py
|
Antoniii/In_Rust_We_Trust
|
43513b4a34b2d7e20950db9a0ac811721db06a1a
|
[
"MIT"
] | null | null | null |
examples/nod.py
|
Antoniii/In_Rust_We_Trust
|
43513b4a34b2d7e20950db9a0ac811721db06a1a
|
[
"MIT"
] | null | null | null |
examples/nod.py
|
Antoniii/In_Rust_We_Trust
|
43513b4a34b2d7e20950db9a0ac811721db06a1a
|
[
"MIT"
] | null | null | null |
def nod(x,y):
if y != 0:
return nod(y, x % y)
else:
return x
n1 = int(input("Введите n1: "))
n2 = int(input("Введите n2: "))
print("НОД = ", nod(n1,n2))
print("НОK = ", int((n1*n2)/nod(n1,n2)))
| 18.272727
| 40
| 0.547264
| 38
| 201
| 2.894737
| 0.421053
| 0.145455
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067901
| 0.19403
| 201
| 11
| 40
| 18.272727
| 0.611111
| 0
| 0
| 0
| 0
| 0
| 0.178218
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.333333
| 0.222222
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
2616fa0e716ad26e2e73aae0033703cf5ccaa90b
| 2,417
|
py
|
Python
|
UML2ER/contracts/HContract03_IsolatedLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 3
|
2017-06-02T19:26:27.000Z
|
2021-06-14T04:25:45.000Z
|
UML2ER/contracts/HContract03_IsolatedLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 8
|
2016-08-24T07:04:07.000Z
|
2017-05-26T16:22:47.000Z
|
UML2ER/contracts/HContract03_IsolatedLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 1
|
2019-10-31T06:00:23.000Z
|
2019-10-31T06:00:23.000Z
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HContract03_IsolatedLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HContract03_IsolatedLHS
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HContract03_IsolatedLHS, self).__init__(name='HContract03_IsolatedLHS', num_nodes=0, edges=[])
# Add the edges
self.add_edges([])
# Set the graph attributes
self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule']
self["MT_constraint__"] = """return True"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'HContract03_IsolatedLHS')
self["equations"] = []
# Set the node attributes
# match class Property(Property) node
self.add_node()
self.vs[0]["MT_pre__attr1"] = """return True"""
self.vs[0]["MT_label__"] = """1"""
self.vs[0]["mm__"] = """MT_pre__Property"""
self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Property')
# match class Class(Class) node
self.add_node()
self.vs[1]["MT_pre__attr1"] = """return True"""
self.vs[1]["MT_label__"] = """2"""
self.vs[1]["mm__"] = """MT_pre__Class"""
self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Class')
# match class Package(Package) node
self.add_node()
self.vs[2]["MT_pre__attr1"] = """return True"""
self.vs[2]["MT_label__"] = """3"""
self.vs[2]["mm__"] = """MT_pre__Package"""
self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Package')
# apply class Feature(Feature) node
self.add_node()
self.vs[3]["MT_pre__attr1"] = """return True"""
self.vs[3]["MT_label__"] = """4"""
self.vs[3]["mm__"] = """MT_pre__Feature"""
self.vs[3]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Feature')
# apply class ERModel(ERModel) node
self.add_node()
self.vs[4]["MT_pre__attr1"] = """return True"""
self.vs[4]["MT_label__"] = """5"""
self.vs[4]["mm__"] = """MT_pre__ERModel"""
self.vs[4]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'ERModel')
# define evaluation methods for each apply class.
def eval_attr11(self, attr_value, this):
return True
def eval_attr12(self, attr_value, this):
return True
def eval_attr13(self, attr_value, this):
return True
def eval_attr14(self, attr_value, this):
return True
def eval_attr15(self, attr_value, this):
return True
def constraint(self, PreNode, graph):
return True
| 29.839506
| 102
| 0.679355
| 339
| 2,417
| 4.513274
| 0.224189
| 0.078431
| 0.027451
| 0.066667
| 0.375817
| 0.375817
| 0.193464
| 0.088889
| 0
| 0
| 0
| 0.027858
| 0.138602
| 2,417
| 80
| 103
| 30.2125
| 0.707012
| 0.162598
| 0
| 0.22449
| 0
| 0
| 0.231768
| 0.036464
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.040816
| 0.122449
| 0.326531
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
26200a7b0fdb3b7f678a0edc56db44b7bab13d20
| 2,029
|
py
|
Python
|
tests/test_bulk_stats.py
|
sixy6e/eo-tools
|
9e2772a5f15e04f6f7e3941541381544247ae1f2
|
[
"Apache-2.0"
] | 3
|
2018-04-24T05:57:35.000Z
|
2019-07-23T13:06:11.000Z
|
tests/test_bulk_stats.py
|
sixy6e/eo-tools
|
9e2772a5f15e04f6f7e3941541381544247ae1f2
|
[
"Apache-2.0"
] | 1
|
2015-06-17T04:39:23.000Z
|
2015-06-17T04:39:23.000Z
|
tests/test_bulk_stats.py
|
sixy6e/eo-tools
|
9e2772a5f15e04f6f7e3941541381544247ae1f2
|
[
"Apache-2.0"
] | 2
|
2016-04-04T10:23:27.000Z
|
2020-02-28T08:43:49.000Z
|
#!/usr/bin/env python
# ===============================================================================
# Copyright 2015 Geoscience Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import unittest
import numpy.testing as npt
import numpy
from eotools.bulk_stats import bulk_stats
from scipy import stats
class TestStats(unittest.TestCase):
"""
Unittests for the bulk_stats function.
"""
def setUp(self):
self.data = numpy.random.ranf((10, 100, 100))
self.result = bulk_stats(self.data, double=True)
def test_mean(self):
"""
Test that the mean value is the same.
"""
control = numpy.mean(self.data, axis=0)
npt.assert_allclose(control, self.result[1])
def test_variance(self):
"""
Test that the variance is the same.
"""
control = numpy.var(self.data, axis=0, ddof=1)
npt.assert_allclose(control, self.result[3])
def test_standard_deviation(self):
"""
Test that the standard deviation is the same.
"""
control = numpy.std(self.data, axis=0, ddof=1)
npt.assert_allclose(control, self.result[4])
def test_geometric_mean(self):
"""
Test that the geometric mean is the same.
"""
control = stats.gmean(self.data, axis=0)
npt.assert_allclose(control, self.result[-1])
if __name__ == '__main__':
npt.run_module_suite()
| 27.794521
| 81
| 0.609167
| 259
| 2,029
| 4.679537
| 0.459459
| 0.049505
| 0.039604
| 0.049505
| 0.24835
| 0.165017
| 0.165017
| 0.165017
| 0.165017
| 0.165017
| 0
| 0.016456
| 0.221291
| 2,029
| 72
| 82
| 28.180556
| 0.750633
| 0.462297
| 0
| 0
| 0
| 0
| 0.008264
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 1
| 0.217391
| false
| 0
| 0.217391
| 0
| 0.478261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26406e146753c756d9c6b32ec6a428c03630d569
| 650
|
py
|
Python
|
app/__init__.py
|
Moobusy/learnpython
|
a9a35dec18fcdc4238e83f881c6e308667ec7029
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
Moobusy/learnpython
|
a9a35dec18fcdc4238e83f881c6e308667ec7029
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
Moobusy/learnpython
|
a9a35dec18fcdc4238e83f881c6e308667ec7029
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Docer
~~~~~~
A document viewing platform.
:copyright: (c) 2015 by Docer.Org.
:license: MIT, see LICENSE for more details.
'''
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from app.frontend import frontend as frontend_blueprint
from app.backend import backend as backend_blueprint
app = Flask(__name__)
app.config.from_object('config.DevelopmentConfig')
# mongodb
mongo = MongoEngine(app)
def create_app():
"""Create a flask app with a config."""
app.register_blueprint(frontend_blueprint)
app.register_blueprint(backend_blueprint, name = 'admin', url_prefix = '/admin')
return app
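# A sketch of how the factory above might be run during development (the real project
# may use a separate run script or the flask CLI instead; debug=True is illustrative):
if __name__ == '__main__':
    create_app().run(debug=True)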
| 24.074074
| 81
| 0.747692
| 87
| 650
| 5.436782
| 0.494253
| 0.038055
| 0.084567
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008913
| 0.136923
| 650
| 27
| 82
| 24.074074
| 0.834225
| 0.287692
| 0
| 0
| 0
| 0
| 0.078652
| 0.053933
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.363636
| 0
| 0.545455
| 0.363636
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
2641b42023211ee1b539a62e50d934444683d94e
| 953
|
py
|
Python
|
main.py
|
ghosh-r/text_scraper_for_bengali_poems
|
da148a0e8680fe365d4177fe269be875b0150c5c
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
ghosh-r/text_scraper_for_bengali_poems
|
da148a0e8680fe365d4177fe269be875b0150c5c
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
ghosh-r/text_scraper_for_bengali_poems
|
da148a0e8680fe365d4177fe269be875b0150c5c
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
from sys import argv
from tqdm import tqdm
from time import sleep
from random import uniform
from bs4 import BeautifulSoup
import requests
from get_pages_urls import get_list_pages
from get_urls import get_urls_per_page
from write_poem import write_poem
def main(root_url: str,
poet_url: str,
text_file: str
) -> None:
poems_list_pages = get_list_pages(root_url=root_url, poet_url=poet_url)
poem_pages = [get_urls_per_page(page) for page in poems_list_pages]
INDIV_POEM_URLS = []
for url_group in poem_pages:
for url in url_group:
INDIV_POEM_URLS.append(url)
for indiv_poem_url in tqdm(INDIV_POEM_URLS):
random_sleep_sec = uniform(0, 5)
sleep(random_sleep_sec)
write_poem(url=indiv_poem_url, text_file=text_file)
return
if __name__ == '__main__':
main(root_url=argv[1], poet_url=argv[2], text_file=argv[3])
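# Given the argv unpacking above, the expected invocation looks like the following
# (the URLs and output filename are illustrative placeholders only):
#   python main.py https://example-poetry-site.org /poet/some-poet/ poems.txt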
| 24.435897
| 75
| 0.713536
| 152
| 953
| 4.092105
| 0.296053
| 0.072347
| 0.062701
| 0.045016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008097
| 0.222455
| 953
| 39
| 76
| 24.435897
| 0.831309
| 0
| 0
| 0
| 0
| 0
| 0.008386
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.37037
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
2648b74d5b48d2d6ff54adeaeb4a4c006edab254
| 3,106
|
py
|
Python
|
utils/emails.py
|
rajattomar1301/HomeWorks-Initial
|
be7e6b6db2f7cf414a1f488e2efc473a95d9338a
|
[
"MIT"
] | null | null | null |
utils/emails.py
|
rajattomar1301/HomeWorks-Initial
|
be7e6b6db2f7cf414a1f488e2efc473a95d9338a
|
[
"MIT"
] | 6
|
2021-03-18T22:07:14.000Z
|
2022-03-11T23:39:30.000Z
|
utils/emails.py
|
rajattomar1301/HomeWorks-Initial
|
be7e6b6db2f7cf414a1f488e2efc473a95d9338a
|
[
"MIT"
] | null | null | null |
import re
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
domain = "127.0.0.1"
def send_email(to, subject, text):
fromaddr = "[email protected]"
toaddr = to
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = subject
body = text
msg.attach(MIMEText(body, 'plain'))
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(fromaddr, "mithereicome@91")
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
def send_confirmation_email(user_email,user_name, confirmation_key):
send_email(user_email, "Welcome to HomeWorks by Dewan!",
"""Hello {}!, and thanks for registering with {}!
We are really happy that you choose to be a part of our site
Please click the below link to confirm your email with us:
{}/confirm_email_link/{}/
Once you've done that, your account will be enabled, and you will be able to access everything that is available with us
If you didn't register an account, then you can disregard this email.
Regards
Rajat Tomar
Founder & Creator
([email protected])""".format(user_name,"HomeWorks By Dewan",domain , confirmation_key))
def send_new_homework_email(user_email, user_name, teacher_name, subject, deadline):
send_email(user_email, "New HomeWork By {}".format(teacher_name),
"""Hey {}
I know this is probably bad news for ya but your {} teacher gave you a new homework to do.
It's due by {} so you better hurry fella!
It was awesome talking to you!
Happy Homeworking!
Regards
Rajat Tomar
System Admin and Creator
([email protected])""".format(user_name, subject, deadline))
def send_welcome_email(user_email, user_name, provider):
send_email(user_email, "Welcome To HomeWorks By Dewan",
"""Hey {}
I would like to take the opportunity of thanking you for signing up on our platform using your {} credentials.
There are a lot of things for you to explore so I suggest you get right on them :)
Once again welcome aboard sailor!
It was awesome talking to you!
Happy Homeworking!
Regards
Rajat Tomar
System Admin and Creator
([email protected])""".format(user_name, provider))
def send_query_email(name, email, phone, regard, query):
send_email("[email protected]", "New query from {}".format(name),"""Hey Rajat,
You have a new query from {} their email id is: {} and phone number is: {}
It's regarding {} and they say:
{}
Thanks!
""".format(name, email, phone, regard, query))
def send_reset_email(email, name, conf_key):
send_email(email, "You requested for a new password!","""
Hey {}!
We see that you requested for your password to be changed!
Please click the below link for changing your password:
{}/reset-password-page/{}/{}/
Please note that the above link is valid only for one time use""".format(name,domain,email, conf_key))
def is_valid_email(email):
if len(email) > 7:
        if re.match("^.+\\@(\\[?)[a-zA-Z0-9\\-\\.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(\\]?)$", email) is not None:
return 1
return 0
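# A minimal sketch (not part of the original module) exercising the validator
# above; the addresses are made up, and send_email is not called because it
# needs live SMTP credentials.
if __name__ == '__main__':
    for _candidate in ('user@example.com', 'not-an-email', 'a@b.io'):
        # Prints 1 for a plausible address, 0 otherwise; addresses of 7
        # characters or fewer are rejected by the length guard.
        print(_candidate, is_valid_email(_candidate))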
| 31.06
| 120
| 0.716355
| 479
| 3,106
| 4.561587
| 0.388309
| 0.037071
| 0.038444
| 0.024714
| 0.237529
| 0.163387
| 0.163387
| 0.143707
| 0.143707
| 0.104348
| 0
| 0.014655
| 0.165164
| 3,106
| 99
| 121
| 31.373737
| 0.827998
| 0
| 0
| 0
| 0
| 0.020833
| 0.331511
| 0.072068
| 0
| 0
| 0
| 0
| 0
| 1
| 0.145833
| false
| 0.083333
| 0.083333
| 0
| 0.270833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
26501096fb0d6e0fc4401d923fa758cce0b9b091
| 853
|
py
|
Python
|
entity/cards/LETL_017H/LETL_306.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 102
|
2021-10-20T09:06:39.000Z
|
2022-03-28T13:35:11.000Z
|
entity/cards/LETL_017H/LETL_306.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 98
|
2021-10-19T16:13:27.000Z
|
2022-03-27T13:27:49.000Z
|
entity/cards/LETL_017H/LETL_306.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 55
|
2021-10-19T03:56:50.000Z
|
2022-03-25T08:25:26.000Z
|
# -*- coding: utf-8 -*-
from hearthstone.entities import Entity
from entity.spell_entity import SpellEntity
class LETL_306(SpellEntity):
"""
    Blizzard 5
    Randomly deals $6 damage to 3 enemy mercenaries and reduces their speed next turn by (2).0Randomly deals $7 damage to 3 enemy mercenaries and reduces their speed next turn by (2).0Randomly deals $8 damage to 3 enemy mercenaries and reduces their speed next turn by (2).0Randomly deals $9 damage to 3 enemy mercenaries and reduces their speed next turn by (2).0Randomly deals $10 damage to 3 enemy mercenaries and reduces their speed next turn by (2).
"""
def __init__(self, entity: Entity):
super().__init__(entity)
self.damage = 6
self.range = 3
def play(self, game, hero, target):
power = game.get_spell_power(self.spell_school, hero.own)
hero_list = game.get_hero_list(not hero.own())
for i, h in enumerate(hero_list):
if i >= 3:
break
h.got_damage(game, (self.damage + power) * self.damage_advantage[self.lettuce_role][
h.lettuce_role])
| 32.807692
| 183
| 0.644783
| 110
| 853
| 4.809091
| 0.481818
| 0.122873
| 0.132325
| 0.196597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042945
| 0.235639
| 853
| 25
| 184
| 34.12
| 0.768405
| 0.237984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.133333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
2661899145bc08ac1035d9320ae8710c8aceeb71
| 14,728
|
py
|
Python
|
electricpy/conversions.py
|
engineerjoe440/electricpy
|
03155a34ea024b61a9a6c41241cd664d0df0fb6b
|
[
"MIT"
] | null | null | null |
electricpy/conversions.py
|
engineerjoe440/electricpy
|
03155a34ea024b61a9a6c41241cd664d0df0fb6b
|
[
"MIT"
] | null | null | null |
electricpy/conversions.py
|
engineerjoe440/electricpy
|
03155a34ea024b61a9a6c41241cd664d0df0fb6b
|
[
"MIT"
] | null | null | null |
################################################################################
"""
`electricpy` Package - `conversions` Module.
>>> from electricpy import conversions
Filled with simple conversion functions to help manage unit conversions and the
like, this module is very helpful to electrical engineers.
Built to support operations similar to Numpy and Scipy, this package is designed
to aid in scientific calculations.
"""
################################################################################
# Import Local Requirements
from electricpy.constants import WATTS_PER_HP, Aabc, A012, KWH_PER_BTU
# Import Required Packages
import numpy as _np
# Define HP to Watts Calculation
def hp_to_watts(hp):
r"""
Horsepower to Watts Formula.
Calculates the power (in watts) given the
horsepower.
.. math:: P_{\text{watts}}=P_{\text{horsepower}}\cdot745.699872
Same as `watts`.
Parameters
----------
hp: float
The horsepower to compute.
Returns
-------
watts: float
The power in watts.
"""
return hp * WATTS_PER_HP
watts = hp_to_watts # Make Duplicate Name
# Define Watts to HP Calculation
def watts_to_hp(watt):
r"""
Watts to Horsepower Function.
Calculates the power (in horsepower) given
the power in watts.
.. math:: P_{\text{horsepower}}=\frac{P_{\text{watts}}}{745.699872}
Same as `horsepower`.
Parameters
----------
watt: float
The wattage to compute.
Returns
-------
hp: float
The power in horsepower.
"""
return watt / WATTS_PER_HP
horsepower = watts_to_hp # Make Duplicate Name
# Define kWh to BTU function and vice-versa
def kwh_to_btu(kWh):
r"""
    Kilo-Watt-Hours to BTU Function.
    Converts kWh (kilo-Watt-hours) to BTU (British Thermal Units).
.. math:: \text{BTU} = \text{kWh}\cdot3412.14
Same as `btu`.
Parameters
----------
kWh: float
        The number of kilo-Watt-hours
Returns
-------
BTU: float
The number of British Thermal Units
"""
return kWh * KWH_PER_BTU
btu = kwh_to_btu # Make Duplicate Name
def btu_to_kwh(BTU):
r"""
BTU to Kilo-Watt-Hours Function.
Converts BTU (British Thermal Units) to kWh (kilo-Watt-hours).
.. math:: \text{kWh} = \frac{\text{BTU}}{3412.14}
Same as `kwh`.
Parameters
----------
BTU: float
The number of British Thermal Units
Returns
-------
kWh: float
The number of kilo-Watt-hours
"""
return BTU / KWH_PER_BTU
kwh = btu_to_kwh # Make Duplicate Name
# Define Simple Radians to Hertz Converter
def rad_to_hz(radians):
r"""
Radians to Hertz Converter.
Accepts a frequency in radians/sec and calculates
the hertz frequency (in Hz).
.. math:: f_{\text{Hz}} = \frac{f_{\text{rad/sec}}}{2\cdot\pi}
Same as `hertz`.
Parameters
----------
radians: float
The frequency (represented in radians/sec)
Returns
-------
hertz: float
The frequency (represented in Hertz)
"""
return radians / (2 * _np.pi) # Evaluate and Return
hertz = rad_to_hz # Make Duplicate Name
# Define Simple Hertz to Radians Converter
def hz_to_rad(hz):
r"""
Hertz to Radians Converter.
Accepts a frequency in Hertz and calculates
the frequency in radians/sec.
.. math:: f_{\text{rad/sec}} = f_{\text{Hz}}\cdot2\cdot\pi
Same as `radsec`.
Parameters
----------
hz: float
The frequency (represented in Hertz)
Returns
-------
radians: float
The frequency (represented in radians/sec)
"""
return hz * (2 * _np.pi) # Evaluate and Return
radsec = hz_to_rad # Make Duplicate Name
# Define Sequence Component Conversion Function
def abc_to_seq(Mabc, reference='A'):
r"""
Phase-System to Sequence-System Conversion.
Converts phase-based values to sequence
components.
.. math:: M_{\text{012}}=A_{\text{012}}\cdot M_{\text{ABC}}
Same as phs_to_seq.
Parameters
----------
Mabc: list of complex
Phase-based values to be converted.
reference: {'A', 'B', 'C'}
Single character denoting the reference,
default='A'
Returns
-------
M012: numpy.array
Sequence-based values in order of 0-1-2
See Also
--------
seq_to_abc: Sequence to Phase Conversion
sequence: Phase Impedance to Sequence Converter
"""
# Condition Reference:
reference = reference.upper()
if reference == 'A':
M = Aabc
elif reference == 'B':
M = _np.roll(Aabc, 1, 0)
elif reference == 'C':
M = _np.roll(Aabc, 2, 0)
else:
raise ValueError("Invalid Phase Reference.")
return M.dot(Mabc)
# Define Second Name for abc_to_seq
phs_to_seq = abc_to_seq
# Define Phase Component Conversion Function
def seq_to_abc(M012, reference='A'):
r"""
Sequence-System to Phase-System Conversion.
Converts sequence-based values to phase
components.
.. math:: M_{\text{ABC}}=A_{\text{012}}^{-1}\cdot M_{\text{012}}
Same as seq_to_phs.
Parameters
----------
M012: list of complex
Sequence-based values to convert.
reference: {'A', 'B', 'C'}
Single character denoting the reference,
default='A'
Returns
-------
Mabc: numpy.array
Phase-based values in order of A-B-C
See Also
--------
abc_to_seq: Phase to Sequence Conversion
sequence: Phase Impedance to Sequence Converter
"""
# Compute Dot Product
M = A012.dot(M012)
# Condition Reference:
reference = reference.upper()
if reference == 'A':
pass
elif reference == 'B':
M = _np.roll(M, 1, 0)
elif reference == 'C':
M = _np.roll(M, 2, 0)
else:
raise ValueError("Invalid Phase Reference.")
return M
# Define Second Name for seq_to_abc
seq_to_phs = seq_to_abc
# Define Sequence Impedance Calculator
def sequencez(Zabc, reference='A', resolve=False, diag=False, rounds=3):
r"""
Sequence Impedance Calculator.
Accepts the phase (ABC-domain) impedances for a
system and calculates the sequence (012-domain)
impedances for the same system. If the argument
`resolve` is set to true, the function will
combine terms into the set of [Z0, Z1, Z2].
When resolve is False:
.. math:: Z_{\text{012-M}}=A_{\text{012}}^{-1}Z_{\text{ABC}}A_{\text{012}}
When resolve is True:
.. math:: Z_{\text{012}}=A_{\text{012}}Z_{\text{ABC}}A_{\text{012}}^{-1}
Parameters
----------
Zabc: numpy.array of complex
2-D (3x3) matrix of complex values
        representing the phase impedance
in the ABC-domain.
reference: {'A', 'B', 'C'}
Single character denoting the reference,
default='A'
resolve: bool, optional
Control argument to force the function to
evaluate the individual sequence impedance
[Z0, Z1, Z2], default=False
diag: bool, optional
Control argument to force the function to
reduce the matrix to its diagonal terms.
rounds: int, optional
Integer denoting number of decimal places
resulting matrix should be rounded to.
default=3
Returns
-------
Z012: numpy.array of complex
2-D (3x3) matrix of complex values
representing the sequence impedance
in the 012-domain
See Also
--------
seq_to_abc: Sequence to Phase Conversion
abc_to_seq: Phase to Sequence Conversion
"""
# Condition Reference
reference = reference.upper()
roll_rate = {'A': 0, 'B': 1, 'C': 2}
# Test Validity
if reference not in roll_rate:
raise ValueError("Invalad Phase Reference")
# Determine Roll Factor
roll = roll_rate[reference]
# Evaluate Matrices
M012 = _np.roll(A012, roll, 0)
min_v = _np.linalg.inv(M012)
# Compute Sequence Impedance
if resolve:
Z012 = M012.dot(Zabc.dot(min_v))
else:
Z012 = min_v.dot(Zabc.dot(M012))
# Reduce to Diagonal Terms if Needed
if diag:
Z012 = [Z012[0][0], Z012[1][1], Z012[2][2]]
return _np.around(Z012, rounds)
# Define Angular Velocity Conversion Functions
def rad_to_rpm(rad):
"""
Radians-per-Second to RPM Converter.
Given the angular velocity in rad/sec, this function will evaluate the
velocity in RPM (Revolutions-Per-Minute).
Parameters
----------
rad: float
The angular velocity in radians-per-second
Returns
-------
rpm: float
The angular velocity in revolutions-per-minute (RPM)
"""
rpm = 60 / (2 * _np.pi) * rad
return rpm
# Define Angular Velocity Conversion Functions
def rpm_to_rad(rpm):
"""
RPM to Radians-per-Second Converter.
Given the angular velocity in RPM (Revolutions-Per-Minute), this function
will evaluate the velocity in rad/sec.
Parameters
----------
rpm: float
The angular velocity in revolutions-per-minute (RPM)
Returns
-------
rad: float
The angular velocity in radians-per-second
"""
rad = 2 * _np.pi / 60 * rpm
return rad
# Define Angular Velocity Conversion Functions
def hz_to_rpm(hz):
"""
Hertz to RPM Converter.
Given the angular velocity in Hertz, this function will evaluate the
velocity in RPM (Revolutions-Per-Minute).
Parameters
----------
hz: float
The angular velocity in Hertz
Returns
-------
rpm: float
The angular velocity in revolutions-per-minute (RPM)
"""
return hz * 60
# Define Angular Velocity Conversion Functions
def rpm_to_hz(rpm):
"""
RPM to Hertz Converter.
Given the angular velocity in RPM (Revolutions-Per-Minute), this function
will evaluate the velocity in Hertz.
Parameters
----------
rpm: float
The angular velocity in revolutions-per-minute (RPM)
Returns
-------
hz: float
The angular velocity in Hertz
"""
return rpm / 60
# Define dBW to Watts converter
def dbw_to_watts(dbw):
"""
Convert dBW to Watts.
Given the power in the decibel scale, this function will evaluate the
power in Watts.
Parameters
----------
dbw: float
Power in the decibel scale (dBW)
Returns
-------
    watts: float
Power in Watts
"""
return 10 ** (dbw / 10)
# Define Watts to dBW converter
def watts_to_dbw(watt):
"""
Watt to dBW converter.
Given the power in watts, this function will evaluate the power in the
decibel scale.
Parameters
----------
watt: float
Power in Watts
Return
------
    dbw: float
        Power in the decibel scale (dBW)
"""
return 10 * _np.log10(watt)
# Define dbW to dBmW converter
def dbw_to_dbmw(dbw):
"""
Convert dBW to dBmW.
Given the power in the decibel scale, this function will evaluate the power
in the decibel-milli-watts scale.
Parameters
----------
dbw: float
Power in the decibel scale (dBW)
Return
------
dbmw: float
Power in the decibel-milli-watts scale (dBmW)
"""
return dbw + 30
# Define dBmW to dBW converter
def dbmw_to_dbw(dbmw):
"""
Convert dBmW to dBW.
Given the power in the decibel milli-watts-scale, this function will evaluate
the power in the decibel scale.
Parameters
----------
dbmw: float
Power in the decibel-milli-watts scale (dBmW)
Return
------
dbw: float
Power in the decibel scale (dBW)
"""
return dbmw - 30
# Define dBmW to Watts converter
def dbmw_to_watts(dbmw):
"""
Convert dbmW to Watts.
Given the power in the decibel milli-watts-scale, this function will evaluate
the power in watts.
Parameters
----------
dbmw: float
Power in the decibel-milli-watts scale (dBmW)
Return
------
watt: float
Power in Watts
"""
dbw = dbmw_to_dbw(dbmw)
return dbw_to_watts(dbw)
# Define Watts to dBmW converter
def watts_to_dbmw(watt):
"""
Watts to dBmW.
Given the power in watts, this function will evaluate
the power in the decibel milli-watt scale.
Parameters
----------
watt: float
Power in Watts
Return
------
dbmw: float
Power in the decibel-milli-watts scale (dBmW)
"""
dbw = watts_to_dbw(watt)
return dbw_to_dbmw(dbw)
# Define Voltage to decibel converter
def voltage_to_db(voltage, ref_voltage):
"""
Voltage to Decibel.
Given the voltage and reference voltage, this function will evaluate
the voltage in the decibel scale.
Parameters
----------
voltage: float
voltage
ref_voltage: float
Reference voltage
Return
------
decibel: float
voltage in the decibel scale
"""
return 20 * _np.log10(voltage / ref_voltage)
# Define Decibel to reference Voltage
def db_to_vref(db, voltage):
"""
Decibel to Reference Voltage.
    Given the decibel value and measured voltage, this function will evaluate
    the reference voltage.
Parameters
----------
db: float
voltage in Decibel
voltage: float
Voltage
Return
------
ref_voltage: float
reference voltage
"""
return voltage * _np.power(10, -(db / 20))
# Define Decibel to reference Voltage
def db_to_voltage(db, ref_voltage):
"""
    Decibel to Voltage.
    Given the decibel value and reference voltage, this function will evaluate
    the voltage.
Parameters
----------
db: float
voltage in Decibel
ref_voltage: float
Ref Voltage
Return
------
voltage: float
Voltage
"""
    # Inverse of voltage_to_db: V = V_ref * 10**(dB / 20)
    return ref_voltage * _np.power(10, db / 20)
# END
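# A brief usage sketch (appended here, not part of the library) for a few of
# the scalar converters defined above; the input values are arbitrary. The
# sequence-domain helpers are omitted because they operate on three-phase
# quantities rather than single scalars.
if __name__ == '__main__':
    print(hp_to_watts(1))       # ~745.699872 W
    print(hz_to_rad(60))        # ~376.99 rad/sec
    print(rpm_to_hz(3600))      # 60.0 Hz
    print(dbmw_to_watts(30))    # 1.0 W, since 30 dBmW is 0 dBW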
| 23.266983
| 81
| 0.577404
| 1,789
| 14,728
| 4.670207
| 0.129681
| 0.024297
| 0.025853
| 0.032555
| 0.499461
| 0.448235
| 0.387911
| 0.358827
| 0.279952
| 0.243447
| 0
| 0.019619
| 0.311312
| 14,728
| 632
| 82
| 23.303797
| 0.804101
| 0.693441
| 0
| 0.141414
| 0
| 0
| 0.029153
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.010101
| 0.020202
| 0
| 0.464646
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
266b7396a2ed1939667431e7fe0b116000780021
| 1,358
|
py
|
Python
|
script/python3/util/env.py
|
setminami/IrControl
|
bcdd44b7f6aeca75226cdcfc611dc63032c38949
|
[
"MIT"
] | null | null | null |
script/python3/util/env.py
|
setminami/IrControl
|
bcdd44b7f6aeca75226cdcfc611dc63032c38949
|
[
"MIT"
] | 2
|
2018-09-21T11:53:28.000Z
|
2018-12-30T03:37:23.000Z
|
script/python3/util/env.py
|
setminami/IrControl
|
bcdd44b7f6aeca75226cdcfc611dc63032c38949
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# this made for python3
from os import environ
def expand_env(params, verbose=False):
""" dotenv like function, but not dotenv """
for key, val in params.items():
_print('try %s, %s'%(key, val), verbose)
if isinstance(val, dict):
_print('ORDEREDDICT', verbose)
params[key] = expand_env(val, verbose)
elif isinstance(val, list):
_print('LIST', verbose)
params[key] = [expand_env(x, verbose) for x in val]
elif isinstance(val, str) and (val.startswith('${') \
and val.endswith('}')):
_print('LEAF', verbose)
env_key = val[2:-1]
if env_key in list(environ.keys()):
params[key] = environ[env_key]
_print('Overwrite env value {} = {}'.format(val, '***'), verbose)
                _print('If the IFTTT triggers do not fire, please re-check your own IFTTT key settings.')
else:
_print('## {} not exported for {}. Please check your yaml file and env. ##'.format(env_key, key), verbose)
_print('Env {} vs keys = {}'.format(env_key, list(environ.keys())), verbose)
exit(1)
else:
_print('?? %s TYPE is %s'%(val, type(val)), verbose)
return params
def _print(msg, v=False):
if v: print(msg)
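# Illustrative sketch only (not part of the original module): expand_env
# resolves '${VAR}' leaves against the process environment. The variable
# name below is hypothetical.
if __name__ == '__main__':
    environ.setdefault('DEMO_IFTTT_KEY', 'dummy-value')
    sample = {'ifttt': {'key': '${DEMO_IFTTT_KEY}'}, 'retries': 3}
    print(expand_env(sample, verbose=True))
    # -> {'ifttt': {'key': 'dummy-value'}, 'retries': 3}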
| 39.941176
| 122
| 0.54271
| 168
| 1,358
| 4.279762
| 0.404762
| 0.041725
| 0.044506
| 0.061196
| 0.069541
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005319
| 0.307806
| 1,358
| 33
| 123
| 41.151515
| 0.759574
| 0.060383
| 0
| 0.074074
| 0
| 0
| 0.184543
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.037037
| 0
| 0.148148
| 0.407407
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
2670f782ce4049f02c248c80f13a94aafff1be8d
| 1,440
|
py
|
Python
|
game/content/ghplots/__init__.py
|
AmkG/gearhead-caramel
|
0238378295a09b4b33adb2ec0854fa06b0ad7b1b
|
[
"Apache-2.0"
] | null | null | null |
game/content/ghplots/__init__.py
|
AmkG/gearhead-caramel
|
0238378295a09b4b33adb2ec0854fa06b0ad7b1b
|
[
"Apache-2.0"
] | null | null | null |
game/content/ghplots/__init__.py
|
AmkG/gearhead-caramel
|
0238378295a09b4b33adb2ec0854fa06b0ad7b1b
|
[
"Apache-2.0"
] | null | null | null |
import inspect
from . import actionscenes
from . import dd_combatmission
from . import dd_customobjectives
from . import dd_distanttown
from . import dd_homebase
from . import dd_intro
from . import dd_lancedev
from . import dd_main
from . import dd_roadedge
from . import dd_roadedge_propp
from . import dd_roadstops
from . import dd_tarot
from . import dd_tarotsupport
from . import encounters
from . import lancemates
from . import missionbuilder
from . import mocha
from . import recovery
from . import utility
from game.content import mechtarot, PLOT_LIST, UNSORTED_PLOT_LIST, CARDS_BY_NAME
from pbge.plots import Plot
def harvest( mod ):
for name in dir( mod ):
o = getattr( mod, name )
if inspect.isclass( o ) and issubclass( o , Plot ) and o is not Plot and o is not mechtarot.TarotCard:
PLOT_LIST[ o.LABEL ].append( o )
UNSORTED_PLOT_LIST.append( o )
# print o.__name__
if issubclass(o,mechtarot.TarotCard):
CARDS_BY_NAME[o.__name__] = o
harvest(actionscenes)
harvest(dd_combatmission)
harvest(dd_customobjectives)
harvest(dd_distanttown)
harvest(dd_homebase)
harvest(dd_intro)
harvest(dd_lancedev)
harvest(dd_main)
harvest(dd_roadedge)
harvest(dd_roadedge_propp)
harvest(dd_roadstops)
harvest(dd_tarot)
harvest(dd_tarotsupport)
harvest(encounters)
harvest(lancemates)
harvest(missionbuilder)
harvest(mocha)
harvest(recovery)
harvest(utility)
| 26.181818
| 110
| 0.758333
| 197
| 1,440
| 5.319797
| 0.263959
| 0.181298
| 0.137405
| 0.038168
| 0.024809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172222
| 1,440
| 54
| 111
| 26.666667
| 0.879195
| 0.011111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020408
| false
| 0
| 0.44898
| 0
| 0.469388
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
2683c81a292c3d694af782d7d6d9714cc7c5d078
| 5,264
|
py
|
Python
|
madic/tests/test_io.py
|
dcroote/madic
|
fb00f312f5abc9f5a0bfc4a00a5a2e6e1c4ee563
|
[
"BSD-3-Clause"
] | 2
|
2017-12-08T03:24:22.000Z
|
2017-12-13T10:22:09.000Z
|
madic/tests/test_io.py
|
dcroote/madic
|
fb00f312f5abc9f5a0bfc4a00a5a2e6e1c4ee563
|
[
"BSD-3-Clause"
] | null | null | null |
madic/tests/test_io.py
|
dcroote/madic
|
fb00f312f5abc9f5a0bfc4a00a5a2e6e1c4ee563
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import pandas as pd
import numpy as np
from pandas.testing import assert_series_equal
from madic import io
class TestChromatogramExpansion(object):
def setup_method(self):
# two rows of comma separated intensity chromatograms
self.df = pd.DataFrame([['1,2,3,4,5,6,5,4,3,2,1'],
['1,2,3,4,5,4,3,2,1']],
columns=['intensities'])
def test_expand_comma_sep_series_no_smoothing(self):
expected_series = pd.Series([np.array([1., 2., 3., 4., 5., 6., 5.,
4., 3., 2., 1.]),
np.array([1., 2., 3., 4., 5., 4., 3.,
2., 1.])],
name='intensities')
result = io._expand_comma_sep_series(self.df.intensities)
assert_series_equal(result, expected_series)
def test_expand_comma_sep_series_with_smoothing(self):
expected_series = pd.Series([np.array([1., 2., 3., 4., 4.6, 4.8,
4.6, 4., 3., 2., 1.]),
np.array([1., 2., 3., 3.6, 3.8, 3.6,
3., 2., 1.])],
name='intensities')
result = io._expand_comma_sep_series(self.df.intensities,
smooth=True)
assert_series_equal(result, expected_series)
class TestReplicateColumnSplit(object):
def setup_method(self):
self.series = pd.Series(['Site4_ConditionA_Part2_094',
'Site4_ConditionA_Part3_095',
'Site4_ConditionB_Part2_096',
'Site4_ConditionB_Part3_097'
])
def test_split_delimiter_position(self):
expected_series = pd.Series(['ConditionA', 'ConditionA', 'ConditionB',
'ConditionB'], name='sample_name')
result = io.replicate_to_sample_name(self.series, '_', 1)
assert_series_equal(result, expected_series)
def test_load_skyline_transition_report():
report_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../../examples/'
'madic_skyline_daily_data.csv'))
df = io.read_transition_report(report_path,
delimiter='_',
delimiter_pos=1)
assert sorted(df.label.unique()) == ['heavy', 'light']
assert df.shape[0] == 40
assert df.pep.unique().size == 2
assert df.sample_name.unique().size == 2
assert df.rep.unique().size == 4
def test_write_out_summary(tmpdir):
summary = pd.DataFrame([
['sample1','PEP1',True,True,True,True,False],
['sample1','PEP2',True,False,False,True,False]],
columns=['sample_name', 'pep',
'pass_signal_to_noise',
'pass_transition_ratio',
'pass_retention_time',
'pass_all_replicate',
'interference_corrected'])
# write data
path = tmpdir.join('summary.csv')
summary.to_csv(str(path), index=False)
# load expected file contents
testsdir = os.path.abspath(os.path.dirname(__file__))
expected_file = os.path.join(testsdir, 'data/for_testing_summary.csv')
with open(expected_file) as f:
expected = f.read()
# compare contents
assert path.read() == expected
def test_write_out_data(tmpdir):
df = pd.DataFrame([
['rep1','PEP1','y5',True,True,True,True,False],
['rep1','PEP1','y6',True,True,True,True,False],
['rep1','PEP1','y7',True,True,True,True,False],
['rep2','PEP1','y5',True,True,True,True,False],
['rep2','PEP1','y6',True,True,True,True,False],
['rep2','PEP1','y7',True,True,True,True,False],
['rep1','PEP2','y5',True,True,False,True,False],
['rep1','PEP2','y6',True,True,True,True,False],
['rep1','PEP2','y7',True,True,True,True,False],
['rep2','PEP2','y5',True,False,True,True,False],
['rep2','PEP2','y6',True,False,True,True,False],
['rep2','PEP2','y7',True,False,True,True,False]
],
columns=['rep', 'pep', 'prod_ion',
'pass_signal_to_noise',
'pass_transition_ratio',
'pass_retention_time',
'pass_all_replicate',
'interference'])
df['sample_name'] = 'sample1'
df['label'] = 'light'
df['times_arr'] = [np.arange(3)]*12
df['intensities_arr'] = [[500.1, 800.9, 500.1]]*12
path = tmpdir.join('data.csv')
io.write_out_data(df, str(path))
testsdir = os.path.abspath(os.path.dirname(__file__))
expected_file = os.path.join(testsdir, 'data/for_testing_data.csv')
with open(expected_file) as f:
expected = f.read()
assert path.read() == expected
| 37.6
| 79
| 0.510448
| 588
| 5,264
| 4.360544
| 0.22449
| 0.096724
| 0.084243
| 0.056162
| 0.520281
| 0.450858
| 0.418097
| 0.297972
| 0.263651
| 0.24961
| 0
| 0.044237
| 0.347264
| 5,264
| 139
| 80
| 37.870504
| 0.701979
| 0.020327
| 0
| 0.23
| 0
| 0.01
| 0.148486
| 0.052407
| 0
| 0
| 0
| 0
| 0.11
| 1
| 0.08
| false
| 0.08
| 0.05
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
2688264518d4b814e3d6dda29397dc244b099dd8
| 226
|
py
|
Python
|
plot.py
|
arthtyagi/makefile
|
d64c38ddad63c7f90dd23c26e3f398229aa7dfa4
|
[
"MIT"
] | null | null | null |
plot.py
|
arthtyagi/makefile
|
d64c38ddad63c7f90dd23c26e3f398229aa7dfa4
|
[
"MIT"
] | null | null | null |
plot.py
|
arthtyagi/makefile
|
d64c38ddad63c7f90dd23c26e3f398229aa7dfa4
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
x = []
y = []
with open('points') as f:
for point in map(lambda x: x.split(), f.readlines()):
x.append(int(point[0]))
y.append(int(point[1]))
plt.scatter(x, y)
plt.show()
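# Hedged sketch (not part of the original script): the loop above expects a
# plain-text file named 'points' holding two whitespace-separated integers
# per line. A helper like the following, run beforehand, would produce one.
def _write_example_points(path='points'):
    # Each line holds "x y" for a single sample point.
    with open(path, 'w') as out:
        for x_val, y_val in [(1, 2), (2, 4), (3, 9)]:
            out.write('{} {}\n'.format(x_val, y_val))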
| 17.384615
| 57
| 0.588496
| 38
| 226
| 3.5
| 0.631579
| 0.030075
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011299
| 0.216814
| 226
| 12
| 58
| 18.833333
| 0.740113
| 0
| 0
| 0
| 0
| 0
| 0.026549
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
268a1a32773db807fdd5256fd06cf225b0146c23
| 335
|
py
|
Python
|
drafts/vanish_explode_gradients/freeze_lower_layers.py
|
quanhua92/deeplearning_tutorials
|
32fec492ca21c248dd1fb234db0a95a532df3469
|
[
"MIT"
] | 1
|
2017-07-06T13:00:36.000Z
|
2017-07-06T13:00:36.000Z
|
drafts/vanish_explode_gradients/freeze_lower_layers.py
|
quanhua92/deeplearning_tutorials
|
32fec492ca21c248dd1fb234db0a95a532df3469
|
[
"MIT"
] | null | null | null |
drafts/vanish_explode_gradients/freeze_lower_layers.py
|
quanhua92/deeplearning_tutorials
|
32fec492ca21c248dd1fb234db0a95a532df3469
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
# We will train all layers except hidden[12]. Therefore, Layers 1 and 2 are frozen
train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="hidden[34]|outputs")
loss = None # Your loss is here
train_op = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(loss, var_list=train_vars)
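# A minimal sketch, assuming TensorFlow 1.x graph mode, of how the scope
# filter above is typically wired into a small network. Layer names and sizes
# are made up; the function is illustrative and never called here.
def _frozen_lower_layers_sketch():
    X = tf.placeholder(tf.float32, shape=(None, 28 * 28), name="X")
    y = tf.placeholder(tf.int32, shape=(None,), name="y")
    h1 = tf.layers.dense(X, 300, activation=tf.nn.relu, name="hidden1")
    h2 = tf.layers.dense(h1, 100, activation=tf.nn.relu, name="hidden2")
    h3 = tf.layers.dense(h2, 50, activation=tf.nn.relu, name="hidden3")
    h4 = tf.layers.dense(h3, 50, activation=tf.nn.relu, name="hidden4")
    logits = tf.layers.dense(h4, 10, name="outputs")
    loss = tf.reduce_mean(
        tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits))
    # "hidden[34]|outputs" is a regex: only hidden3, hidden4 and outputs train.
    train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                   scope="hidden[34]|outputs")
    return tf.train.GradientDescentOptimizer(0.01).minimize(loss, var_list=train_vars)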
| 47.857143
| 100
| 0.79403
| 53
| 335
| 4.886792
| 0.773585
| 0.069498
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030201
| 0.110448
| 335
| 7
| 100
| 47.857143
| 0.838926
| 0.292537
| 0
| 0
| 0
| 0
| 0.076596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
268d50e291169e0e5fc4a8c8469d8e4d8109bcce
| 585
|
py
|
Python
|
code/app/configuration/configuration.py
|
WesleyAdriann/discord_bot_dota
|
8a2921ee548f2fc6066bf15e7aed6688037dd434
|
[
"MIT"
] | null | null | null |
code/app/configuration/configuration.py
|
WesleyAdriann/discord_bot_dota
|
8a2921ee548f2fc6066bf15e7aed6688037dd434
|
[
"MIT"
] | null | null | null |
code/app/configuration/configuration.py
|
WesleyAdriann/discord_bot_dota
|
8a2921ee548f2fc6066bf15e7aed6688037dd434
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
class Configuration:
def __init__(self):
self.DOTA_BUFF_BASE_URL = 'https://pt.dotabuff.com'
self.HEROES = f'{self.DOTA_BUFF_BASE_URL}/heroes'
self.HERO_COUNTERS = '/counters'
self.HERO_COUNTERS_FULL = f'{self.DOTA_BUFF_BASE_URL}/hero-name/{self.HERO_COUNTERS}'
self.DISCORD_BOT_KEY = os.environ.get('DISCORD_BOT_KEY')
def configure_hero_counters_full(self, hero):
hero_name = '-'.join(hero.split()).lower()
self.HERO_COUNTERS_FULL = f'{self.HEROES}/{hero_name}{self.HERO_COUNTERS}'
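# A short usage sketch (not part of the original module); the hero name is
# arbitrary, and DISCORD_BOT_KEY is only read from the environment.
if __name__ == '__main__':
    _config = Configuration()
    _config.configure_hero_counters_full('Shadow Fiend')
    print(_config.HERO_COUNTERS_FULL)
    # -> https://pt.dotabuff.com/heroes/shadow-fiend/counters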
| 34.411765
| 93
| 0.676923
| 82
| 585
| 4.47561
| 0.402439
| 0.13079
| 0.217984
| 0.13079
| 0.414169
| 0.231608
| 0
| 0
| 0
| 0
| 0
| 0.002075
| 0.176068
| 585
| 16
| 94
| 36.5625
| 0.759336
| 0.035897
| 0
| 0
| 0
| 0
| 0.322638
| 0.237077
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.090909
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
268f327d0be7b5ea3314ce2aea07749bbbce81aa
| 908
|
py
|
Python
|
grpc_examples/sending_image/client.py
|
iwmq/coding_notes
|
8e406124bcd2dbf3228e945701e952c3a12025c6
|
[
"MIT"
] | null | null | null |
grpc_examples/sending_image/client.py
|
iwmq/coding_notes
|
8e406124bcd2dbf3228e945701e952c3a12025c6
|
[
"MIT"
] | null | null | null |
grpc_examples/sending_image/client.py
|
iwmq/coding_notes
|
8e406124bcd2dbf3228e945701e952c3a12025c6
|
[
"MIT"
] | null | null | null |
"""
The Python implementation of the GRPC image client.
Modified from grpc/examples/python/helloworld/greeting_client.py.
"""
from __future__ import print_function
import logging
from io import BytesIO
from PIL import Image
import grpc
import image_pb2
import image_pb2_grpc
def run():
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
stub = image_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(image_pb2.HelloRequest(name='you'))
message = response.message
buf = BytesIO(response.image)
buf.seek(0)
image = Image.open(buf, formats=["JPEG"])
print(f"Greeter client received: {message}")
image.show()
if __name__ == '__main__':
logging.basicConfig()
run()
| 25.222222
| 78
| 0.718062
| 124
| 908
| 5.08871
| 0.572581
| 0.050713
| 0.044374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01355
| 0.187225
| 908
| 35
| 79
| 25.942857
| 0.841463
| 0.305066
| 0
| 0
| 0
| 0
| 0.10306
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.35
| 0
| 0.4
| 0.1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
cd0d1977c612b5942005c2d4eceddb8039516a10
| 7,249
|
py
|
Python
|
test/unit/mysql_db_admin/process_request.py
|
mjpernot/mysql-mysql-db-admin
|
4821d6923155a48362869a6f2bf8c69fe3e533d4
|
[
"MIT"
] | null | null | null |
test/unit/mysql_db_admin/process_request.py
|
mjpernot/mysql-mysql-db-admin
|
4821d6923155a48362869a6f2bf8c69fe3e533d4
|
[
"MIT"
] | null | null | null |
test/unit/mysql_db_admin/process_request.py
|
mjpernot/mysql-mysql-db-admin
|
4821d6923155a48362869a6f2bf8c69fe3e533d4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Classification (U)
"""Program: process_request.py
Description: Unit testing of process_request in mysql_db_admin.py.
Usage:
test/unit/mysql_db_admin/process_request.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import mysql_db_admin
import lib.gen_libs as gen_libs
import version
__version__ = version.__version__
def func_holder(server, dbs, tbl):
"""Method: func_holder
Description: Function stub holder for a generic function call.
Arguments:
server
dbs
tbl
"""
status = True
if server and dbs and tbl:
status = True
return status
class Server(object):
"""Class: Server
Description: Class stub holder for mysql_class.Server class.
Methods:
__init__
"""
def __init__(self):
"""Method: __init__
Description: Class initialization.
Arguments:
"""
pass
class UnitTest(unittest.TestCase):
"""Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp
test_mysql_80
test_pre_mysql_80
test_single_miss_tbl
test_single_tbl
test_all_tbls
test_all_dbs
"""
def setUp(self):
"""Function: setUp
Description: Initialization for unit testing.
Arguments:
"""
self.server = Server()
self.func_name = func_holder
self.db_name = None
self.db_name2 = ["db1"]
self.tbl_name = None
self.tbl_name2 = ["tbl1"]
self.tbl_name3 = ["tbl3"]
self.version = {"version": "5.7"}
self.version2 = {"version": "8.0"}
@mock.patch("mysql_db_admin.mysql_class.fetch_sys_var")
@mock.patch("mysql_db_admin.gen_libs.dict_2_list")
@mock.patch("mysql_db_admin.mysql_libs.fetch_tbl_dict")
@mock.patch("mysql_db_admin.mysql_libs.fetch_db_dict")
def test_mysql_80(self, mock_fetch_db, mock_fetch_tbl, mock_list,
mock_version):
"""Function: test_mysql_80
Description: Test with processing all databases.
Arguments:
"""
mock_version.return_value = self.version2
mock_fetch_db.return_value = True
mock_fetch_tbl.return_value = True
mock_list.side_effect = [["db1"], ["tbl1", "tbl2"]]
self.assertFalse(
mysql_db_admin.process_request(
self.server, self.func_name, self.db_name, self.tbl_name))
@mock.patch("mysql_db_admin.mysql_class.fetch_sys_var")
@mock.patch("mysql_db_admin.gen_libs.dict_2_list")
@mock.patch("mysql_db_admin.mysql_libs.fetch_tbl_dict")
@mock.patch("mysql_db_admin.mysql_libs.fetch_db_dict")
def test_pre_mysql_80(self, mock_fetch_db, mock_fetch_tbl, mock_list,
mock_version):
"""Function: test_pre_mysql_80
Description: Test with processing all databases.
Arguments:
"""
mock_version.return_value = self.version
mock_fetch_db.return_value = True
mock_fetch_tbl.return_value = True
mock_list.side_effect = [["db1"], ["tbl1", "tbl2"]]
self.assertFalse(
mysql_db_admin.process_request(
self.server, self.func_name, self.db_name, self.tbl_name))
@mock.patch("mysql_db_admin.mysql_class.fetch_sys_var")
@mock.patch("mysql_db_admin.detect_dbs")
@mock.patch("mysql_db_admin.gen_libs.dict_2_list")
@mock.patch("mysql_db_admin.mysql_libs.fetch_tbl_dict")
@mock.patch("mysql_db_admin.mysql_libs.fetch_db_dict")
def test_single_miss_tbl(self, mock_fetch_db, mock_fetch_tbl, mock_list,
mock_detect, mock_version):
"""Function: test_single_miss_tbl
Description: Test with single missing table in a database.
Arguments:
"""
mock_version.return_value = self.version
mock_fetch_db.return_value = True
mock_fetch_tbl.return_value = True
mock_list.side_effect = [["db1"], ["tbl1", "tbl2"]]
mock_detect.return_value = True
with gen_libs.no_std_out():
self.assertFalse(
mysql_db_admin.process_request(
self.server, self.func_name, self.db_name2,
self.tbl_name3))
@mock.patch("mysql_db_admin.mysql_class.fetch_sys_var")
@mock.patch("mysql_db_admin.detect_dbs")
@mock.patch("mysql_db_admin.gen_libs.dict_2_list")
@mock.patch("mysql_db_admin.mysql_libs.fetch_tbl_dict")
@mock.patch("mysql_db_admin.mysql_libs.fetch_db_dict")
def test_single_tbl(self, mock_fetch_db, mock_fetch_tbl, mock_list,
mock_detect, mock_version):
"""Function: test_single_tbl
Description: Test with single table in a database.
Arguments:
"""
mock_version.return_value = self.version
mock_fetch_db.return_value = True
mock_fetch_tbl.return_value = True
mock_list.side_effect = [["db1"], ["tbl1", "tbl2"]]
mock_detect.return_value = True
self.assertFalse(
mysql_db_admin.process_request(
self.server, self.func_name, self.db_name2, self.tbl_name2))
@mock.patch("mysql_db_admin.mysql_class.fetch_sys_var")
@mock.patch("mysql_db_admin.detect_dbs")
@mock.patch("mysql_db_admin.gen_libs.dict_2_list")
@mock.patch("mysql_db_admin.mysql_libs.fetch_tbl_dict")
@mock.patch("mysql_db_admin.mysql_libs.fetch_db_dict")
def test_all_tbls(self, mock_fetch_db, mock_fetch_tbl, mock_list,
mock_detect, mock_version):
"""Function: test_all_tbls
Description: Test with all tables in a database.
Arguments:
"""
mock_version.return_value = self.version
mock_fetch_db.return_value = True
mock_fetch_tbl.return_value = True
mock_list.side_effect = [["db1"], ["tbl1", "tbl2"]]
mock_detect.return_value = True
self.assertFalse(
mysql_db_admin.process_request(
self.server, self.func_name, self.db_name2, self.tbl_name))
@mock.patch("mysql_db_admin.mysql_class.fetch_sys_var")
@mock.patch("mysql_db_admin.gen_libs.dict_2_list")
@mock.patch("mysql_db_admin.mysql_libs.fetch_tbl_dict")
@mock.patch("mysql_db_admin.mysql_libs.fetch_db_dict")
def test_all_dbs(self, mock_fetch_db, mock_fetch_tbl, mock_list,
mock_version):
"""Function: test_all_dbs
Description: Test with processing all databases.
Arguments:
"""
mock_version.return_value = self.version
mock_fetch_db.return_value = True
mock_fetch_tbl.return_value = True
mock_list.side_effect = [["db1"], ["tbl1", "tbl2"]]
self.assertFalse(
mysql_db_admin.process_request(
self.server, self.func_name, self.db_name, self.tbl_name))
if __name__ == "__main__":
unittest.main()
| 26.36
| 76
| 0.646986
| 943
| 7,249
| 4.586426
| 0.120891
| 0.058266
| 0.099884
| 0.099884
| 0.719306
| 0.69896
| 0.69896
| 0.69896
| 0.69896
| 0.69896
| 0
| 0.010353
| 0.253828
| 7,249
| 274
| 77
| 26.456204
| 0.78924
| 0.193682
| 0
| 0.638655
| 0
| 0
| 0.201939
| 0.182733
| 0
| 0
| 0
| 0
| 0.05042
| 1
| 0.07563
| false
| 0.008403
| 0.067227
| 0
| 0.168067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
cd190e09b3c36d0f4700cf8693b8dfde027f164e
| 6,680
|
py
|
Python
|
초보를 위한 셀레니움/#1 Google Screenshots Scrapping/main.py
|
donddog/Nomad_Academy_Online_Course_Codes
|
391fde26052a67f7b533219ab0de6096830697b6
|
[
"MIT"
] | 1
|
2021-02-11T16:45:22.000Z
|
2021-02-11T16:45:22.000Z
|
초보를 위한 셀레니움/#1 Google Screenshots Scrapping/main.py
|
donddog/Nomad_Academy_Online_Course_Codes
|
391fde26052a67f7b533219ab0de6096830697b6
|
[
"MIT"
] | null | null | null |
초보를 위한 셀레니움/#1 Google Screenshots Scrapping/main.py
|
donddog/Nomad_Academy_Online_Course_Codes
|
391fde26052a67f7b533219ab0de6096830697b6
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
import shutil
import os
class GoogleKeywordScreenshooter:
def __init__(self, keyword, screenshots_dir):
self.browser = webdriver.Chrome('../chromedriver.exe')
self.keyword = keyword
self.screenshots_dir = screenshots_dir
def start(self):
try:
if not os.path.exists('screenshots'):
os.makedirs('screenshots')
except Exception:
pass
self.browser.get("https://google.com")
search_bar = self.browser.find_element_by_class_name("gLFyf")
search_bar.send_keys(self.keyword)
search_bar.send_keys(Keys.ENTER)
def repetitive(self):
try:
shitty_element = WebDriverWait(self.browser, 10).until(
EC.presence_of_element_located((By.CLASS_NAME, "g-blk"))
)
self.browser.execute_script(
"""
const shitty = arguments[0];
shitty.parentElement.removeChild(shitty)
""",
shitty_element,
)
except Exception:
pass
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index}.png"
)
# ---------------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[3]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index+10}.png"
)
# ---------------------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[4]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 20}.png"
)
# ----------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[5]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 30}.png"
)
# ----------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[6]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 40}.png"
)
# ----------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[7]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 50}.png"
)
# ----------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[8]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 60}.png"
)
# ----------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[9]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 70}.png"
)
# ----------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[10]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 80}.png"
)
# ----------------------------------------------------------------------
next_page_button = self.browser.find_element_by_xpath('//*[@id="xjs"]/div/table/tbody/tr/td[11]/a')
next_page_button.click()
repetitive(self)
search_results = self.browser.find_elements_by_class_name("g")
for index, search_result in enumerate(search_results):
search_result.screenshot(
f"{self.screenshots_dir}/{self.keyword}x{index + 90}.png"
)
def finish(self):
self.browser.quit()
def tozipfile(self):
shutil.make_archive('screentshotresults', 'zip', 'screenshots')
shutil.rmtree('screenshots/')
domain_competitors = GoogleKeywordScreenshooter("buy domain", "screenshots")
domain_competitors.start()
domain_competitors.finish()
domain_competitors.tozipfile()
# python_competitors = GoogleKeywordScreenshooter("python book", "screenshots")
# python_competitors.start()
# python_competitors.finish()
| 38.171429
| 107
| 0.572156
| 714
| 6,680
| 5.093838
| 0.166667
| 0.075612
| 0.082486
| 0.036294
| 0.675282
| 0.668683
| 0.668683
| 0.668683
| 0.668683
| 0.668683
| 0
| 0.006169
| 0.223503
| 6,680
| 175
| 108
| 38.171429
| 0.695007
| 0.117814
| 0
| 0.458333
| 0
| 0
| 0.182071
| 0.143155
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0.016667
| 0.058333
| 0
| 0.108333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
cd314573d937025d1a50953b27cb47b89f485e85
| 2,972
|
py
|
Python
|
yggdrasil/serialize/FunctionalSerialize.py
|
astro-friedel/yggdrasil
|
5ecbfd083240965c20c502b4795b6dc93d94b020
|
[
"BSD-3-Clause"
] | 22
|
2019-02-05T15:20:07.000Z
|
2022-02-25T09:00:40.000Z
|
yggdrasil/serialize/FunctionalSerialize.py
|
astro-friedel/yggdrasil
|
5ecbfd083240965c20c502b4795b6dc93d94b020
|
[
"BSD-3-Clause"
] | 48
|
2019-02-15T20:41:24.000Z
|
2022-03-16T20:52:02.000Z
|
yggdrasil/serialize/FunctionalSerialize.py
|
astro-friedel/yggdrasil
|
5ecbfd083240965c20c502b4795b6dc93d94b020
|
[
"BSD-3-Clause"
] | 16
|
2019-04-27T03:36:40.000Z
|
2021-12-02T09:47:06.000Z
|
from yggdrasil.serialize.SerializeBase import SerializeBase
class FunctionalSerialize(SerializeBase):
r"""Class for serializing/deserializing a Python object into/from a bytes
message using defined functions.
Args:
encoded_datatype (schema, optional): JSON schema describing the type
that serialized objects should conform to. Defaults to the class
attribute default_encoded_datatype. If either func_serialize or
func_deserialize are not provided, this needs to be specified in
order to serialize non-bytes objects.
func_serialize (func, optional): Callable object that takes Python
objects as input and returns a representation that conforms to
encoded_datatype. Defaults to None and the default serialization
for encoded_datatype will be used.
func_deserialize (func, optional): Callable object that takes objects
of a type that conforms to encoded_datatype and returns a
deserialized Python object. Defaults to None and the default
deserialization for encoded_datatype will be used.
**kwargs: Additional keyword args are passed to the parent class's
constructor.
"""
_seritype = 'functional'
    _schema_subtype_description = ('Serializer that uses provided function to '
'serialize messages.')
    _schema_required = []
_schema_properties = {
'encoded_datatype': {'type': 'schema'},
'func_serialize': {'type': 'function'},
'func_deserialize': {'type': 'function'}}
func_serialize = None
func_deserialize = None
def __init__(self, **kwargs):
if isinstance(kwargs.get('func_serialize', None), SerializeBase):
kwargs['func_serialize'] = kwargs['func_serialize'].func_serialize
if isinstance(kwargs.get('func_deserialize', None), SerializeBase):
kwargs['func_deserialize'] = kwargs['func_deserialize'].func_deserialize
super(FunctionalSerialize, self).__init__(**kwargs)
# @property
# def base_class(self):
# r"""DefaultSerialize: Default version of serialization."""
# if getattr(self, '_base_class', None) is None:
# self._base_class = DefaultSerialize(datatype=self.typedef,
# **self.serializer_info)
# return self._base_class
# TODO: In some cases this should be the object typedef
# @property
# def typedef(self):
# r"""dict: Type definition."""
# return self.encoded_typedef
@property
def serializer_info(self):
r"""dict: Serializer info."""
raise RuntimeError("Cannot define serializer information for user "
+ "supplied functions.")
@property
def empty_msg(self):
r"""obj: Object indicating empty message."""
return self.encoded_datatype._empty_msg
| 43.072464
| 84
| 0.654778
| 320
| 2,972
| 5.9125
| 0.365625
| 0.063425
| 0.020613
| 0.027484
| 0.15222
| 0.095137
| 0
| 0
| 0
| 0
| 0
| 0
| 0.265814
| 2,972
| 68
| 85
| 43.705882
| 0.867094
| 0.532974
| 0
| 0.071429
| 0
| 0
| 0.236985
| 0
| 0
| 0
| 0
| 0.014706
| 0
| 1
| 0.107143
| false
| 0
| 0.035714
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
cd67c54b1e46edcb715070c6ab83abb9ea55fa6d
| 1,178
|
py
|
Python
|
sloth/simple.py
|
codacy-badger/sloth
|
a4f2118b2f19e55271613d43c785aaf4ab030b5e
|
[
"MIT"
] | 1
|
2021-02-11T12:14:23.000Z
|
2021-02-11T12:14:23.000Z
|
src/sloth/simple.py
|
Legorooj/sloth
|
47f6358349f8545fc475efab19edd6efda3ffbcd
|
[
"MIT"
] | null | null | null |
src/sloth/simple.py
|
Legorooj/sloth
|
47f6358349f8545fc475efab19edd6efda3ffbcd
|
[
"MIT"
] | null | null | null |
# ----------------------------------------------------------------------------
# Copyright (c) 2020 Legorooj <[email protected]>
# Copyright (c) 2020 FluffyKoalas <github.com/fluffykoalas>
# This file and all others in this project are licensed under the MIT license.
# Please see the LICENSE file in the root of this repository for more details.
# ----------------------------------------------------------------------------
from .timers import Timer
from .raw import tests, runners
__all__ = [
'call_after', 'time_callable', 'time_eval', 'time_exec'
]
def call_after(seconds, func, args=None, kwargs=None):
Timer(seconds, func, args, kwargs).start()
def time_callable(func, n=2, *args, **kwargs):
test = tests.TestCallableWithArgs(func, *args, **kwargs)
runner = runners.AverageTest(test, n)
return runner.run()
def time_eval(snippet, n=2, gbls=None, lcls=None):
test = tests.TestEval(snippet, gbls, lcls)
runner = runners.AverageTest(test, n)
return runner.run()
def time_exec(snippet, n=2, gbls=None, lcls=None):
test = tests.TestExec(snippet, gbls, lcls)
runner = runners.AverageTest(test, n)
return runner.run()
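# A quick sketch (not part of the module); it assumes runners.AverageTest.run()
# returns the averaged timing and that Timer forwards args like threading.Timer.
if __name__ == '__main__':
    def _work(n):
        return sum(i * i for i in range(n))

    print(time_callable(_work, 5, 100000))    # average over 5 runs
    print(time_eval('sum(range(1000))', 3))   # average over 3 evaluations
    print(time_exec('squares = [i * i for i in range(1000)]', 3))
    call_after(2, print, args=('fired',))     # fires roughly 2 seconds later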
| 32.722222
| 78
| 0.616299
| 145
| 1,178
| 4.924138
| 0.427586
| 0.033613
| 0.10084
| 0.117647
| 0.341737
| 0.341737
| 0.341737
| 0.341737
| 0.341737
| 0.246499
| 0
| 0.010956
| 0.147708
| 1,178
| 35
| 79
| 33.657143
| 0.700199
| 0.355688
| 0
| 0.315789
| 0
| 0
| 0.054521
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.105263
| 0
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
cd6d9b2b982fd93ff60ad7ad2c61547c26a40708
| 3,022
|
py
|
Python
|
movo_common/si_utils/src/si_utils/my_tf_listener.py
|
ALAN-NUS/kinova_movo
|
05a0451f5c563359ae0ffe3280e1df85caec9e55
|
[
"BSD-3-Clause"
] | 1
|
2021-03-26T06:33:28.000Z
|
2021-03-26T06:33:28.000Z
|
movo_common/si_utils/src/si_utils/my_tf_listener.py
|
ALAN-NUS/kinova_movo
|
05a0451f5c563359ae0ffe3280e1df85caec9e55
|
[
"BSD-3-Clause"
] | null | null | null |
movo_common/si_utils/src/si_utils/my_tf_listener.py
|
ALAN-NUS/kinova_movo
|
05a0451f5c563359ae0ffe3280e1df85caec9e55
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import rospy
import math
import tf
import geometry_msgs.msg
from geometry_msgs.msg import PoseStamped
from si_utils.lx_transformerROS import my_transformer
if __name__ == '__main__':
rospy.init_node('my_tf_listener')
listener = tf.TransformListener()
# my_trans = my_transformer()
rate = rospy.Rate(10.0)
while not rospy.is_shutdown():
try:
# look1 = listener.lookupTransform('/left_ee_link', '/link1', rospy.Time(0))
# look2 = listener.lookupTransform('/base_link', '/left_ee_link', rospy.Time(0))
# look3 = listener.lookupTransform('/base_link', '/link1', rospy.Time(0))
# rospy.loginfo(look3)
# rospy.loginfo(look2)
pose = PoseStamped()
pose.header.frame_id = '/link1'
pose2 = listener.transformPose('/base_link', pose)
rospy.loginfo(pose2)
# (trans,rot) = listener.lookupTransform('/base_link', '/ar_marker_1', rospy.Time(0))
# (trans,rot) = listener.lookupTransform('/base_link', '/left_ee_link', rospy.Time(0))
# (trans1,rot1) = listener.lookupTransform('/movo_camera_color_optical_frame', '/ar_marker_17', rospy.Time(0))
# (trans,rot) = listener.lookupTransform('/base_link', '/movo_camera_color_optical_frame', rospy.Time(0))
# (trans,rot) = listener.lookupTransform('/movo_camera_color_optical_frame', '/base_link', rospy.Time(0))
# (trans,rot) = listener.lookupTransform('/base_link', '/ar_marker_1', rospy.Time(0))
# pose = PoseStamped()
# pose.header.frame_id = 'ar_marker_1'
# rospy.loginfo("========== First trans ===========")
# pose1 = listener.transformPose('/movo_camera_color_optical_frame', pose)
# rospy.loginfo(pose1)
# rospy.loginfo("========== Second trans ===========")
# rospy.loginfo(listener.transformPose('/base_link', pose1))
# print(trans)
# print(rot)
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
print('test')
# rate.sleep()
'''
pose = PoseStamped()
pose.header.frame_id = '/ar_marker_17'
rospy.loginfo("========== First trans ===========")
listener.waitForTransform("/ar_marker_17", "/movo_camera_color_optical_frame", rospy.Time(), rospy.Duration(4.0))
pose1 = listener.transformPose('/movo_camera_color_optical_frame', pose)
rospy.loginfo(pose1)
rospy.loginfo("========== Second trans ===========")
rospy.loginfo(listener.transformPose('/base_link', pose1))
pose_nutStart_nut = PoseStamped()
pose_nutStart_nut.header.frame_id = '/nutStart'
pose_nutStart_ar = my_trans.tf.transformPose('/ar_marker_17', pose_nutStart_nut)
rospy.loginfo(pose_nutStart_ar)
pose_nutStart_ca = listener.transformPose('/movo_camera_color_optical_frame', pose_nutStart_ar)
rospy.loginfo(pose_nutStart_ca)
'''
| 35.139535
| 122
| 0.634348
| 336
| 3,022
| 5.41369
| 0.235119
| 0.085761
| 0.049478
| 0.084662
| 0.524464
| 0.503573
| 0.485981
| 0.386476
| 0.313909
| 0.277075
| 0
| 0.017977
| 0.208471
| 3,022
| 85
| 123
| 35.552941
| 0.742475
| 0.410655
| 0
| 0
| 0
| 0
| 0.044824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
cd78d6e1151155e18754cebc1cc2d5b9e9efa63f
| 3,267
|
py
|
Python
|
ocellaris/utils/alarm.py
|
TormodLandet/Ocellaris
|
6b4b2515fb881b1ed8d8fd8d8c23a8e1990ada58
|
[
"Apache-2.0"
] | 1
|
2017-11-07T12:19:44.000Z
|
2017-11-07T12:19:44.000Z
|
ocellaris/utils/alarm.py
|
TormodLandet/Ocellaris
|
6b4b2515fb881b1ed8d8fd8d8c23a8e1990ada58
|
[
"Apache-2.0"
] | null | null | null |
ocellaris/utils/alarm.py
|
TormodLandet/Ocellaris
|
6b4b2515fb881b1ed8d8fd8d8c23a8e1990ada58
|
[
"Apache-2.0"
] | 2
|
2018-05-02T17:17:01.000Z
|
2019-03-11T13:09:40.000Z
|
# Copyright (C) 2018-2019 Tormod Landet
# SPDX-License-Identifier: Apache-2.0
"""
A timeout context manager based on SIGALRM. Permits multiple
SIGALRM events to be queued.
Uses a `heapq` to store the objects to be called when an alarm signal is
raised, so that the next alarm is always at the top of the heap.
Note: SIGALRM does not work on Windows!
Code from ActiveState Python recipes
http://code.activestate.com/recipes/577600-queue-for-managing-multiple-sigalrm-alarms-concurr/
modified by stackoverflow user "James":
https://stackoverflow.com/a/34999808
"""
import heapq
import signal
from time import time
alarmlist = []
def __new_alarm(t, f, a, k):
return (t + time(), f, a, k)
def __next_alarm():
return int(round(alarmlist[0][0] - time())) if alarmlist else None
def __set_alarm():
return signal.alarm(max(__next_alarm(), 1))
class AlarmTimeoutError(Exception):
def __init__(self, message, name):
self.message = message
self.name = name
class AlarmTimeout:
"""
Context manager for timeouts
"""
def __init__(self, name, seconds=1, error_message='Timeout'):
self.name = name
self.seconds = seconds
self.error_message = error_message
def handle_timeout(self):
        raise AlarmTimeoutError(self.error_message, self.name)
def __enter__(self):
self.this_alarm = alarm(self.seconds, self.handle_timeout)
def __exit__(self, type, value, traceback):
try:
cancel(self.this_alarm)
except ValueError:
pass
def __clear_alarm():
"""
Clear an existing alarm.
If the alarm signal was set to a callable other than our own, queue the
previous alarm settings.
"""
oldsec = signal.alarm(0)
oldfunc = signal.signal(signal.SIGALRM, __alarm_handler)
if oldsec > 0 and oldfunc != __alarm_handler:
heapq.heappush(alarmlist, (__new_alarm(oldsec, oldfunc, [], {})))
def __alarm_handler(*_args):
"""
Handle an alarm by calling any due heap entries and resetting the alarm.
Note that multiple heap entries might get called, especially if calling an
entry takes a lot of time.
"""
try:
nextt = __next_alarm()
while nextt is not None and nextt <= 0:
(_tm, func, args, keys) = heapq.heappop(alarmlist)
func(*args, **keys)
nextt = __next_alarm()
finally:
if alarmlist:
__set_alarm()
def alarm(sec, func, *args, **keys):
"""
Set an alarm.
When the alarm is raised in `sec` seconds, the handler will call `func`,
passing `args` and `keys`. Return the heap entry (which is just a big
tuple), so that it can be cancelled by calling `cancel()`.
"""
__clear_alarm()
try:
newalarm = __new_alarm(sec, func, args, keys)
heapq.heappush(alarmlist, newalarm)
return newalarm
finally:
__set_alarm()
def cancel(alarm):
"""
Cancel an alarm by passing the heap entry returned by `alarm()`.
It is an error to try to cancel an alarm which has already occurred.
"""
__clear_alarm()
try:
alarmlist.remove(alarm)
heapq.heapify(alarmlist)
finally:
if alarmlist:
__set_alarm()
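# Example usage (illustrative sketch): interrupt a slow operation after one
# second. Assumes a POSIX platform where SIGALRM is available and relies only
# on the AlarmTimeout / AlarmTimeoutError classes defined above. The stdlib
# time module is imported as _time because this file binds `time` to time.time.
if __name__ == '__main__':
    import time as _time
    try:
        with AlarmTimeout('demo', seconds=1):
            _time.sleep(5)  # stands in for a long-running task
    except AlarmTimeoutError as err:
        print('timed out:', err.message, err.name)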
| 25.724409
| 94
| 0.653811
| 434
| 3,267
| 4.741935
| 0.380184
| 0.017007
| 0.023324
| 0.016521
| 0.044704
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012643
| 0.249464
| 3,267
| 126
| 95
| 25.928571
| 0.826672
| 0.377717
| 0
| 0.305085
| 0
| 0
| 0.003652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.20339
| false
| 0.016949
| 0.050847
| 0.050847
| 0.355932
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
269573fa22001d4ea70efb720d1a7ce5724057f7
| 3,520
|
py
|
Python
|
tools/noise_reduction.py
|
8igfive/MyASR
|
565267d9df4b56cfad5107632146aab8150a962d
|
[
"MIT"
] | null | null | null |
tools/noise_reduction.py
|
8igfive/MyASR
|
565267d9df4b56cfad5107632146aab8150a962d
|
[
"MIT"
] | null | null | null |
tools/noise_reduction.py
|
8igfive/MyASR
|
565267d9df4b56cfad5107632146aab8150a962d
|
[
"MIT"
] | null | null | null |
import argparse
from genericpath import exists
import os
import time
import re
from tqdm import tqdm
import numpy as np
from scipy.io import wavfile
from wiener_scalart import wienerScalart
TIME = time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime())
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
WORKPLACE_DIR = os.path.dirname(CURRENT_DIR)
DUMP_DIR = os.path.join(WORKPLACE_DIR, os.path.join('dump', TIME))
DUMP_FEAT = 'feat_{}.scp'.format(TIME)
DUMP_TEXT = 'text_{}'.format(TIME)
FEAT_FORMAT = r'\s?(.+?)\s+(.+?\.wav)'
intMap = {np.dtype('int8') : (0x7f, -0x80),
np.dtype('int16') : (0x7fff, -0x8000),
np.dtype('int32') : (0x7fffffff, -0x8000000),
np.dtype('int64') : (0x7fffffffffffffff, -0x8000000000000000)}
def noise_reduct(args, filePath, dumpPath):
sampleRate, musicData = wavfile.read(filePath)
dataType = np.dtype('int16')
    musicData.dtype = dataType # FIXME: the data type read back by wavfile may be wrong
if args.debug:
print(min(musicData), max(musicData), intMap[dataType][0] + 1)
if dataType in intMap:
musicData = musicData / (intMap[dataType][0] + 1)
if args.debug:
print(min(musicData), max(musicData))
newData = wienerScalart(musicData, sampleRate)
if dataType in intMap:
if args.debug:
print(min(newData), max(newData))
newData = newData * (intMap[dataType][0])
newData = newData.astype(dataType)
if args.debug:
print(max(newData), min(newData))
wavfile.write(dumpPath, sampleRate, newData)
def main(args):
if args.feat is None or args.text is None:
print('lack of feat file or text file')
return
if os.path.abspath(args.dumpFeat) != args.dumpFeat:
args.dumpFeat = os.path.join(DUMP_DIR, args.dumpFeat)
if os.path.abspath(args.dumpText) != args.dumpText:
args.dumpText = os.path.join(DUMP_DIR, args.dumpText)
if not os.path.exists(DUMP_DIR):
os.makedirs(DUMP_DIR)
with open(args.feat, 'r', encoding='utf8') as f:
dataPairs = re.findall(FEAT_FORMAT, f.read())
with open(args.dumpFeat, 'w', encoding='utf8') as f:
for i in tqdm(range(len(dataPairs))):
dataPair = dataPairs[i]
pathList = os.path.split(dataPair[1])
dumpPath = os.path.join(args.dumpDir, pathList[-1])
f.write('{} {}\n'.format(dataPair[0], dumpPath))
noise_reduct(args, dataPair[1], dumpPath)
with open(args.text, 'r', encoding='utf8') as fin:
with open(args.dumpText, 'w', encoding='utf8') as fout:
fout.write(fin.read())
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--feat', type=str, default=None, help='feat file path')
parser.add_argument('-t', '--text', type=str, default=None, help='text file path')
parser.add_argument('-dd', '--dumpDir', type=str, default=DUMP_DIR, help='the directory where holds new .wav files')
parser.add_argument('-df', '--dumpFeat', type=str, default=os.path.join(DUMP_DIR, DUMP_FEAT), help='dump feat file path')
parser.add_argument('-dt', '--dumpText', type=str, default=os.path.join(DUMP_DIR, DUMP_TEXT), help='dump text file path')
    parser.add_argument('-n', '--noiseLength', type=float, default=0.25, help='the noise time length at the beginning of the audio')
parser.add_argument('-db', '--debug', action='store_true', help='print debug message')
args = parser.parse_args()
main(args)
| 40.930233
| 132
| 0.65142
| 476
| 3,520
| 4.722689
| 0.292017
| 0.037367
| 0.031139
| 0.031139
| 0.201068
| 0.156139
| 0.066726
| 0.066726
| 0.031139
| 0
| 0
| 0.023272
| 0.194318
| 3,520
| 86
| 133
| 40.930233
| 0.769394
| 0.008239
| 0
| 0.083333
| 0
| 0
| 0.118625
| 0.006017
| 0
| 0
| 0.02149
| 0.011628
| 0
| 1
| 0.027778
| false
| 0
| 0.125
| 0
| 0.166667
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26982c5744b84289c1df298a112687956cf70fd9
| 515
|
py
|
Python
|
Dataset/Leetcode/train/46/372.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/46/372.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/46/372.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
class Solution(object):
def XXX(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
def dfs(res, state, n):
if n == 0:
res.append(list(state))
return
for a in nums:
if a not in state:
state.append(a)
dfs(res, state, n-1)
state.pop()
res = []
dfs(res, [], len(nums))
return res
| 23.409091
| 40
| 0.365049
| 54
| 515
| 3.481481
| 0.481481
| 0.095745
| 0.117021
| 0.12766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.518447
| 515
| 21
| 41
| 24.52381
| 0.75
| 0.087379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
2698d48d436f6968997ba06d73587a502b5f3874
| 535
|
py
|
Python
|
main.py
|
Cynicism-lab/hhu-daily-health-common
|
1959beee61f2895166ac0be92b5817cbe278ef51
|
[
"MIT"
] | 14
|
2021-02-03T14:38:15.000Z
|
2022-02-05T08:48:41.000Z
|
main.py
|
zhang-zimin/nanwenguidaka
|
e426ecee8758d70b20cf2a77dc87a6f949196927
|
[
"MIT"
] | 4
|
2021-04-15T15:14:29.000Z
|
2022-03-15T02:29:04.000Z
|
main.py
|
zhang-zimin/nanwenguidaka
|
e426ecee8758d70b20cf2a77dc87a6f949196927
|
[
"MIT"
] | 52
|
2021-02-03T14:38:17.000Z
|
2022-03-29T09:19:12.000Z
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import requests
from datetime import timezone
from datetime import timedelta
from datetime import datetime
import hhu
import os
# In[2]:
utc_time = datetime.utcnow().replace(tzinfo=timezone.utc)
sh_tz = timezone(timedelta(hours=8),name='Asia/Shanghai')
beijing_now = utc_time.astimezone(sh_tz)
datestr = datetime.strftime(beijing_now,'%F')
timestr = datetime.strftime(beijing_now,'%H:%M:%S')
year = datestr[0:4]
month = datestr[5:7]
day = datestr[8:10]
time = timestr
hhu.hhu()
| 17.258065
| 57
| 0.736449
| 83
| 535
| 4.662651
| 0.566265
| 0.144703
| 0.139535
| 0.134367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023404
| 0.121495
| 535
| 30
| 58
| 17.833333
| 0.8
| 0.08972
| 0
| 0
| 0
| 0
| 0.047718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
269c9b7326d9d1d9ee7b7fac3ea362f209cd1e0f
| 1,599
|
py
|
Python
|
tests/test_formats.py
|
antmicro/raviewer
|
7529664d37e994d4c2f4c450a5577b79d73c4bb0
|
[
"Apache-2.0"
] | 12
|
2021-11-18T09:38:34.000Z
|
2022-03-24T19:33:44.000Z
|
tests/test_formats.py
|
antmicro/raviewer
|
7529664d37e994d4c2f4c450a5577b79d73c4bb0
|
[
"Apache-2.0"
] | 1
|
2022-02-14T12:07:02.000Z
|
2022-03-21T19:29:11.000Z
|
tests/test_formats.py
|
antmicro/raviewer
|
7529664d37e994d4c2f4c450a5577b79d73c4bb0
|
[
"Apache-2.0"
] | null | null | null |
"""Module for testing formats on resources entities"""
from raviewer.src.core import (get_displayable, load_image, parse_image)
from terminaltables import AsciiTable
from raviewer.image.color_format import AVAILABLE_FORMATS
import os
import pkg_resources
import time
import pytest
@pytest.fixture
def formats():
return AVAILABLE_FORMATS
def test_all(formats):
"""Test all formats"""
print("Testing all formats, It may take a while...")
table_data = [["Format", "Passed", "Performance"]]
start_range = 800
end_range = 810
for color_format in formats.keys():
file_path = pkg_resources.resource_filename('resources',
color_format + "_1000_750")
passed_results = 0
format_performance = 0
start = time.time()
for width in range(start_range, end_range):
try:
if not os.path.exists(file_path):
break
img = load_image(file_path)
img = parse_image(img.data_buffer, color_format, width)
get_displayable(img)
passed_results += 1
except:
continue
end = time.time()
#Stats
format_performance = "{:.3f}".format(round(end - start, 3))
table_data.append([
color_format, "{}/{}".format(passed_results,
end_range - start_range),
format_performance
])
table = AsciiTable(table_data)
table.title = 'Test all formats'
print(table.table)
| 31.352941
| 79
| 0.590994
| 176
| 1,599
| 5.153409
| 0.414773
| 0.060639
| 0.046307
| 0.041896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016544
| 0.319575
| 1,599
| 50
| 80
| 31.98
| 0.817096
| 0.044403
| 0
| 0
| 0
| 0
| 0.073171
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0.097561
| 0.170732
| 0.02439
| 0.243902
| 0.04878
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
26a6c6bf63ea22f6476519c7dcbec3b7eb479136
| 550
|
py
|
Python
|
generators/activerecord/templates/models/abstract_model.py
|
afeiship/gx
|
9a3b566f64147eb1b6ffc753e2ebdf676f8ddef6
|
[
"MIT"
] | null | null | null |
generators/activerecord/templates/models/abstract_model.py
|
afeiship/gx
|
9a3b566f64147eb1b6ffc753e2ebdf676f8ddef6
|
[
"MIT"
] | null | null | null |
generators/activerecord/templates/models/abstract_model.py
|
afeiship/gx
|
9a3b566f64147eb1b6ffc753e2ebdf676f8ddef6
|
[
"MIT"
] | null | null | null |
from orator import Model
import pendulum
class AbstractModel(Model):
__guarded__ = []
@classmethod
def find_or_new_by(cls, options):
entity = cls.find_by(options)
if not entity:
entity = cls()
for k in options:
v = options[k]
setattr(entity, k, v)
return entity
@classmethod
def find_by(cls, options):
return cls.where(options).first()
# normalize timezone
def fresh_timestamp(self):
return pendulum.now('Asia/Shanghai')
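# Illustrative usage sketch: `Tag` is a hypothetical concrete subclass and the
# column name is an assumption. find_or_new_by either loads the matching row
# or returns a new, unsaved instance populated with the given attributes.
# class Tag(AbstractModel):
#     pass
# tag = Tag.find_or_new_by({'name': 'python'})
# tag.save()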
| 22
| 44
| 0.589091
| 63
| 550
| 4.984127
| 0.555556
| 0.089172
| 0.11465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.323636
| 550
| 24
| 45
| 22.916667
| 0.844086
| 0.032727
| 0
| 0.111111
| 0
| 0
| 0.024528
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.111111
| 0.111111
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
26ab7ad4f1a2c561a6b79626d7ff5c41db51de8f
| 365
|
py
|
Python
|
python/concepts/compile-runtime.py
|
shanavas786/coding-fu
|
2f29781ab793b5932a951259afffbbe0078e8a8a
|
[
"CC0-1.0"
] | 1
|
2021-02-12T03:49:13.000Z
|
2021-02-12T03:49:13.000Z
|
python/concepts/compile-runtime.py
|
shanavas786/coding-fu
|
2f29781ab793b5932a951259afffbbe0078e8a8a
|
[
"CC0-1.0"
] | null | null | null |
python/concepts/compile-runtime.py
|
shanavas786/coding-fu
|
2f29781ab793b5932a951259afffbbe0078e8a8a
|
[
"CC0-1.0"
] | null | null | null |
def func(arg1, arg2=dict()):
print('entering func')
# arg3 is evaluated at compile time of inner
    # so it captures arg3 as {}
def inner(arg3=arg2):
        # arg1 is evaluated when inner is called
# so it uses the value of arg1 at that time
        # which is None
print("arg1", arg1, "arg3", arg3)
arg1 = arg2 = None
return inner
inn = func(1)
inn()
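# Expected output of inn(): "arg1 None arg3 {}" -- the default for arg3
# captured the empty dict bound to arg2 when inner was defined, while arg1
# is looked up when inner is called, after func rebound it to None.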
| 20.277778
| 47
| 0.635616
| 59
| 365
| 3.932203
| 0.525424
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.260274
| 365
| 17
| 48
| 21.470588
| 0.803704
| 0.438356
| 0
| 0
| 0
| 0
| 0.106061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.375
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26b279e2de1f9a9dd6eeef9e0736a69c9c2cc2f9
| 1,027
|
py
|
Python
|
chmap/examples/development/example109_store-creds.py
|
predsci/CHD
|
35f29d1b62861f4ffed57b38d18689b282664bcf
|
[
"Apache-2.0"
] | 3
|
2021-06-29T00:23:47.000Z
|
2021-09-17T18:29:05.000Z
|
chmap/examples/development/example109_store-creds.py
|
predsci/CHD
|
35f29d1b62861f4ffed57b38d18689b282664bcf
|
[
"Apache-2.0"
] | null | null | null |
chmap/examples/development/example109_store-creds.py
|
predsci/CHD
|
35f29d1b62861f4ffed57b38d18689b282664bcf
|
[
"Apache-2.0"
] | 1
|
2021-12-08T06:26:18.000Z
|
2021-12-08T06:26:18.000Z
|
# This is a little bit clunky, but is a better solution than writing passwords into the code
import os
from cryptography.fernet import Fernet
# cred_dir = os.path.join(os.path.dirname(os.getcwd()), "settings")
cred_dir = '/Users/cdowns/work/imac_local/CoronalHoles/mysql_credentials'
key_file = os.path.join(cred_dir, "e_key.bin")
# Generate a new local encryption key if needed
if not os.path.exists(key_file):
key = Fernet.generate_key()
# print(key)
with open(key_file, 'wb') as file_object:
file_object.write(key)
else:
with open(key_file, 'rb') as file_object:
for line in file_object:
key = line
# User inputs password interactively so it is never saved
passw = input("Enter a password to encrypt and save: ")
cipher_suite = Fernet(key)
ciphered_text = cipher_suite.encrypt(passw.encode()) # required to be bytes
creds_file = os.path.join(cred_dir, "e_cred.bin")
print("Writing credential file")
with open(creds_file, 'wb') as file_object:
file_object.write(ciphered_text)
| 31.121212
| 83
| 0.728335
| 162
| 1,027
| 4.462963
| 0.5
| 0.082988
| 0.041494
| 0.038728
| 0.152144
| 0.152144
| 0.152144
| 0.091286
| 0
| 0
| 0
| 0
| 0.166504
| 1,027
| 32
| 84
| 32.09375
| 0.844626
| 0.273612
| 0
| 0
| 0
| 0
| 0.197564
| 0.081191
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.105263
| 0.105263
| 0
| 0.105263
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
26b56046672f411c1c88bcbb0a2ebddb8ba65691
| 176
|
py
|
Python
|
competitive-programming/kattis/heimavinna.py
|
sanchopanca/coding-for-pleasure
|
fed1910e8a5a4241bd55aed333afd79b4405a71d
|
[
"MIT"
] | null | null | null |
competitive-programming/kattis/heimavinna.py
|
sanchopanca/coding-for-pleasure
|
fed1910e8a5a4241bd55aed333afd79b4405a71d
|
[
"MIT"
] | null | null | null |
competitive-programming/kattis/heimavinna.py
|
sanchopanca/coding-for-pleasure
|
fed1910e8a5a4241bd55aed333afd79b4405a71d
|
[
"MIT"
] | null | null | null |
s = 0
problems = input().strip().split(';')
for p in problems:
if '-' in p:
a, b = map(int, p.split('-'))
s += b - a + 1
else:
s += 1
print(s)
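# Example: an input line of "1-5;7" covers problems 1..5 plus problem 7,
# so the script prints 6.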
| 16
| 37
| 0.420455
| 28
| 176
| 2.642857
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026786
| 0.363636
| 176
| 10
| 38
| 17.6
| 0.633929
| 0
| 0
| 0
| 0
| 0
| 0.017045
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26bc7d495f1e995a46390a4be5f8a0a4a460b0ab
| 2,823
|
py
|
Python
|
src/shark/poker/player.py
|
twoodruff01/shark
|
6c183de1993492b614eff332548f5c0f57facda2
|
[
"Apache-2.0"
] | null | null | null |
src/shark/poker/player.py
|
twoodruff01/shark
|
6c183de1993492b614eff332548f5c0f57facda2
|
[
"Apache-2.0"
] | null | null | null |
src/shark/poker/player.py
|
twoodruff01/shark
|
6c183de1993492b614eff332548f5c0f57facda2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2022 Thomas Woodruff
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .. import cli
class Player():
def __init__(self, index, agent):
self.index = index
self.hand = list()
self.agent = agent()
self.chips = 500 # TODO: Chips could be a class of their own or enums (discretisation).
self.is_little_blind = False
self.is_big_blind = False
self.folded = False
def receive_card(self, card):
self.hand.append(card)
def get_action(self, game_state):
'''
TODO: Add the timeout in here.
TODO: Can use some fancy multiprocessing here if you want, to implement strict timeouts.
TODO: Pass in modified copy of self (to avoid changing chips).
'''
return self.agent.get_action(game_state, self)
def small_blind_bet(self):
'''
TODO: Do something if they've run out of money or don't have enough.
TODO: Allow changing the blind sizes somehow.
'''
self.is_little_blind = True
if self.chips < cli.BUY_IN // 2:
raise NotImplementedError("small blind doesn't have enough chips")
elif self.chips == cli.BUY_IN // 2:
raise NotImplementedError("small blind has exactly sufficient chips")
else:
self.chips -= cli.BUY_IN // 2
return cli.BUY_IN // 2
def big_blind_bet(self):
'''
TODO: Do something if they've run out of money or don't have enough.
TODO: Allow changing the blind sizes somehow.
'''
self.is_big_blind = True
if self.chips < cli.BUY_IN:
raise NotImplementedError("big blind doesn't have enough chips")
elif self.chips == cli.BUY_IN // 2:
raise NotImplementedError("big blind has exactly sufficient chips")
else:
self.chips -= cli.BUY_IN
return cli.BUY_IN
def has_funds(self, amount):
return amount <= self.chips
def take_bet(self, amount):
if self.chips - amount < 0:
raise Exception('tried to decrement player chips below 0')
self.chips -= amount
def __str__(self):
return f'{[str(c) for c in self.hand]}'
| 37.144737
| 106
| 0.616366
| 381
| 2,823
| 4.475066
| 0.391076
| 0.052786
| 0.037537
| 0.052786
| 0.323754
| 0.323754
| 0.323167
| 0.323167
| 0.300293
| 0.300293
| 0
| 0.00916
| 0.303932
| 2,823
| 75
| 107
| 37.64
| 0.858524
| 0.377258
| 0
| 0.1
| 0
| 0
| 0.132442
| 0
| 0
| 0
| 0
| 0.106667
| 0
| 1
| 0.2
| false
| 0
| 0.025
| 0.05
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26bd97f61aa8677a52c1050fed16514074e239fd
| 2,004
|
py
|
Python
|
hp_steam_data/src/main.py
|
wangzhefeng/data-analysis
|
e502ac49ae1bc8287243e1faf51c467cc4d4187c
|
[
"MIT"
] | null | null | null |
hp_steam_data/src/main.py
|
wangzhefeng/data-analysis
|
e502ac49ae1bc8287243e1faf51c467cc4d4187c
|
[
"MIT"
] | null | null | null |
hp_steam_data/src/main.py
|
wangzhefeng/data-analysis
|
e502ac49ae1bc8287243e1faf51c467cc4d4187c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import os
PROJECT_PATH = "/mnt/e/dev/test/hp_steam_data/"
DATA_PATH = os.path.join(PROJECT_PATH, "data")
RESULT_PATH = os.path.join(PROJECT_PATH, "result")
def get_origin_data():
"""
origin data
"""
# raw data
eturb_m1_data = pd.read_csv("/mnt/e/dev/test/hp_steam_data/data/eturb_m1_1min_metrics-0817.csv", header = 0, index_col = None)
eturb_m2_data = pd.read_csv("/mnt/e/dev/test/hp_steam_data/data/eturb_m2_1min_metrics-0817.csv", header = 0, index_col = None)
boiler_m1_data = pd.read_csv("/mnt/e/dev/test/hp_steam_data/data/boiler_m1_1min_outlet_steam_flow.csv", header = 0, index_col = None)
boiler_m3_data = pd.read_csv("/mnt/e/dev/test/hp_steam_data/data/boiler_m3_1min_outlet_steam_flow.csv", header = 0, index_col = None)
steampipeline_p1_data = pd.read_csv("/mnt/e/dev/test/hp_steam_data/data/steampipeline_p1_1min_hp_steam_pressure.csv", header = 0, index_col = None)
# data aggregate
df = pd.DataFrame()
# eturb_m1
df["eturb_m1_steam_flow_in"] = eturb_m1_data["ExtCondensTurbineOP.steam_flow_in"]
df["eturb_m2_steam_flow_in"] = eturb_m2_data["ExtCondensTurbineOP.steam_flow_in"]
df["boiler_m1_outlet_steam_flow"] = boiler_m1_data["CFBoilerOP.outlet_steam_flow"]
df["boiler_m3_outlet_steam_flow"] = boiler_m3_data["CFBoilerOP.outlet_steam_flow"]
df["steampipeline_p1_hp_steam_pressure"] = steampipeline_p1_data["SteamPipelineOP.hp_steam_pressure"]
df["boiler_steam_flow"] = df["boiler_m1_outlet_steam_flow"] + df["boiler_m3_outlet_steam_flow"]
df["turbine_steam_flow"] = df["eturb_m1_steam_flow_in"] + df["eturb_m2_steam_flow_in"]
df = df.reset_index(drop = True)
return df
def main():
# print(os.listdir(DATA_PATH))
df = get_origin_data()
print(df.head())
# df.to_csv("/mnt/e/dev/test/hp_steam_data/result/steam_pressure_data.csv", index = None)
if __name__ == "__main__":
main()
| 36.436364
| 151
| 0.733034
| 325
| 2,004
| 4.095385
| 0.209231
| 0.108189
| 0.090158
| 0.057851
| 0.616078
| 0.616078
| 0.437265
| 0.428249
| 0.389932
| 0.227648
| 0
| 0.025877
| 0.132236
| 2,004
| 54
| 152
| 37.111111
| 0.739505
| 0.102295
| 0
| 0
| 0
| 0
| 0.461365
| 0.431472
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.111111
| 0
| 0.222222
| 0.037037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26be6c749fedecdd29350e7dc06917fe50136ca1
| 556
|
py
|
Python
|
Modulo-02/ex058/ex058.py
|
Matheus-Henrique-Burey/Curso-de-Python
|
448aebaab96527affa1e45897a662bb0407c11c6
|
[
"MIT"
] | null | null | null |
Modulo-02/ex058/ex058.py
|
Matheus-Henrique-Burey/Curso-de-Python
|
448aebaab96527affa1e45897a662bb0407c11c6
|
[
"MIT"
] | null | null | null |
Modulo-02/ex058/ex058.py
|
Matheus-Henrique-Burey/Curso-de-Python
|
448aebaab96527affa1e45897a662bb0407c11c6
|
[
"MIT"
] | null | null | null |
from random import randint
print('=-' * 15)
print('ADIVINHE EM QUE NUMERO ESTOU PENÇANDO')
print('=-' * 15)
pc = randint(0, 10)
num = 11
cont = 0
while pc != num:
num = int(input('Sera que voce consegue acertar o numero que pensei, entre 0, 10: '))
if num == pc:
print('PARABES!!! VOCE ACERTOU')
else:
if num < pc:
print('Mais...', end=' ')
else:
print('Menos...', end=' ')
print('Tente novamente')
print('-' * 20)
cont += 1
print(f'Voce tentou {cont} vezes para acertar')
| 23.166667
| 89
| 0.546763
| 74
| 556
| 4.108108
| 0.581081
| 0.046053
| 0.046053
| 0.078947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040506
| 0.289568
| 556
| 23
| 90
| 24.173913
| 0.729114
| 0
| 0
| 0.2
| 0
| 0
| 0.357914
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.05
| 0.45
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
26c4a3b8183960d6aad7bfb1532a7a0f533eda4e
| 4,087
|
py
|
Python
|
flask_simpleview/__init__.py
|
jackwardell/Flask-SimpleView
|
813d5dbe6353f77016c893caff526abae8487492
|
[
"Apache-2.0"
] | null | null | null |
flask_simpleview/__init__.py
|
jackwardell/Flask-SimpleView
|
813d5dbe6353f77016c893caff526abae8487492
|
[
"Apache-2.0"
] | null | null | null |
flask_simpleview/__init__.py
|
jackwardell/Flask-SimpleView
|
813d5dbe6353f77016c893caff526abae8487492
|
[
"Apache-2.0"
] | null | null | null |
import re
import flask
import flask.views
from functools import wraps
def camel_case_to_snake_case(word):
"""very simple mechanism for turning CamelCase words into snake_case"""
return re.sub(r"(?<!^)(?=[A-Z])", "_", word).lower()
def camel_case_to_slug_case(word):
"""very simple mechanism for turning CamelCase words into slug-case"""
return re.sub(r"(?<!^)(?=[A-Z])", "-", word).lower()
class NoTemplate(Exception):
"""exception for when no template is passed either as a param or in the class"""
pass
class SkeletonMixin:
"""Mixin for the flask Skeleton (shared methods between Flask and Blueprint)"""
def add_url_rule(self, *args, **kwargs):
raise NotImplementedError()
def add_view(self, view):
# make all the elements for the adding of a url_rule
# make rule
rule = view.make_rule()
# make endpoint
endpoint = view.make_endpoint()
# make view_func
view_func = view.as_view(endpoint)
# make a list of all the view functions to add
view_funcs = []
# iterate through all the registered methods
for method in view.methods:
# get the function itself
func = getattr(view, method.lower())
# if the function has been decorated with a __rule_extension__
# we can grab it and make the extended rule
if hasattr(func, "__rule_extension__"):
# make the extended rule
extended_rule = rule + func.__rule_extension__
# save the new rule and view func as params for add_url_rule
params = {"rule": extended_rule, "view_func": view_func}
else:
# else we do it without the rule extension
params = {"rule": rule, "view_func": view_func}
# append the method to the list of view funcs
view_funcs.append(params)
# finally, iterate through the view_funcs and add the url_rule
for params in view_funcs:
self.add_url_rule(**params)
def add_api(self, api):
return self.add_view(api)
class Flask(flask.Flask, SkeletonMixin):
"""The flask.Flask application"""
pass
class Blueprint(flask.Blueprint, SkeletonMixin):
"""The flask.Blueprint blueprint"""
pass
class ViewConstructor:
"""mechanism for construction of endpoint and rule"""
methods = []
@classmethod
def get_name(cls):
return getattr(cls, "name", cls.__name__)
@classmethod
def make_endpoint(cls):
return getattr(cls, "endpoint", camel_case_to_snake_case(cls.get_name()))
@classmethod
def make_rule(cls):
return getattr(cls, "rule", "/" + camel_case_to_slug_case(cls.get_name()))
@classmethod
def iter_methods(cls):
for method in cls.methods:
func = getattr(cls, method.lower())
def extends_rule(rule):
def extend_rule(func):
# assert False
# cls, method = func.__qualname__.split('.')
func.__rule_extension__ = rule
return func
# @wraps(func)
# def decorator(*args, **kwargs):
# return func(*args, **kwargs)
#
# return decorator
return extend_rule
class SimpleView(flask.views.MethodView, ViewConstructor):
def render_template(self, *optional_template_name_or_list, **context):
if not hasattr(self, "template") and not optional_template_name_or_list:
raise NoTemplate("No template passed or found on the view")
template_name_or_list = (
optional_template_name_or_list[0]
if optional_template_name_or_list
else self.template
)
return flask.render_template(template_name_or_list, **context)
def __getattr__(self, attr):
return getattr(flask, attr)
def __repr__(self):
rv = '<{}(rule="{}", endpoint="{}", methods={})>'.format(
self.__class__.__name__, self.rule, self.endpoint, self.methods
)
return rv
API = View = SimpleView
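# Illustrative usage sketch (the HelloWorld view and the app wiring below are
# hypothetical, not part of this module):
# app = Flask(__name__)
# class HelloWorld(SimpleView):
#     methods = ['GET']
#     @extends_rule('/<name>')
#     def get(self, name):
#         return 'hello ' + name
# app.add_view(HelloWorld)  # registers GET /hello-world/<name> under endpoint 'hello_world'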
| 28.381944
| 84
| 0.62711
| 508
| 4,087
| 4.807087
| 0.248032
| 0.022932
| 0.034398
| 0.044226
| 0.187551
| 0.09009
| 0.067158
| 0.067158
| 0.067158
| 0.045045
| 0
| 0.000337
| 0.274284
| 4,087
| 143
| 85
| 28.58042
| 0.822994
| 0.261806
| 0
| 0.097222
| 0
| 0
| 0.061466
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.194444
| false
| 0.055556
| 0.055556
| 0.069444
| 0.5
| 0.013889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
26ccd5bc1d5e387e612a0f077f3e861929e6b021
| 2,972
|
py
|
Python
|
toolbox/exp/Experiment.py
|
LinXueyuanStdio/KGE-toolbox
|
916842835e61ba99dde1409592977a2ec55f8aae
|
[
"Apache-2.0"
] | 2
|
2021-10-17T17:50:24.000Z
|
2021-12-13T05:22:46.000Z
|
toolbox/exp/Experiment.py
|
LinXueyuanStdio/KGE-toolbox
|
916842835e61ba99dde1409592977a2ec55f8aae
|
[
"Apache-2.0"
] | null | null | null |
toolbox/exp/Experiment.py
|
LinXueyuanStdio/KGE-toolbox
|
916842835e61ba99dde1409592977a2ec55f8aae
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from toolbox.exp.OutputSchema import OutputSchema
from toolbox.utils.LaTeXSotre import EvaluateLaTeXStoreSchema
from toolbox.utils.MetricLogStore import MetricLogStoreSchema
from toolbox.utils.ModelParamStore import ModelParamStoreSchema
from toolbox.utils.Visualize import VisualizeSchema
class Experiment:
def __init__(self, output: OutputSchema, local_rank: int = -1):
self.output = output
self.local_rank = local_rank
self.debug = self.log_in_main_node(output.logger.debug)
self.log = self.log_in_main_node(output.logger.info)
self.warn = self.log_in_main_node(output.logger.warn)
self.error = self.log_in_main_node(output.logger.error)
self.critical = self.log_in_main_node(output.logger.critical)
self.success = self.log_in_main_node(output.logger.success)
self.fail = self.log_in_main_node(output.logger.failed)
self.vis = VisualizeSchema(str(output.pathSchema.dir_path_visualize))
self.model_param_store = ModelParamStoreSchema(output.pathSchema)
self.metric_log_store = MetricLogStoreSchema(str(output.pathSchema.dir_path_log))
self.latex_store = EvaluateLaTeXStoreSchema(output.pathSchema)
def re_init(self, output: OutputSchema, local_rank: int = -1):
self.output = output
self.local_rank = local_rank
self.debug = self.log_in_main_node(output.logger.debug)
self.log = self.log_in_main_node(output.logger.info)
self.warn = self.log_in_main_node(output.logger.warn)
self.error = self.log_in_main_node(output.logger.error)
self.critical = self.log_in_main_node(output.logger.critical)
self.success = self.log_in_main_node(output.logger.success)
self.fail = self.log_in_main_node(output.logger.failed)
self.vis = VisualizeSchema(str(output.pathSchema.dir_path_visualize))
self.model_param_store = ModelParamStoreSchema(output.pathSchema)
self.metric_log_store = MetricLogStoreSchema(str(output.pathSchema.dir_path_log))
self.latex_store = EvaluateLaTeXStoreSchema(output.pathSchema)
def log_in_main_node(self, log_func):
if self.local_rank == 0:
return log_func
return lambda x: [x]
def dump_model(self, model):
self.debug(model)
self.debug("")
self.debug("Trainable parameters:")
num_params = 0
for name, param in model.named_parameters():
if param.requires_grad:
ps = np.prod(param.size())
num_params += ps
self.debug(f"{name}: {sizeof_fmt(ps)}")
self.log('Total Parameters: %s' % sizeof_fmt(num_params))
self.debug("")
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
| 44.358209
| 89
| 0.685397
| 390
| 2,972
| 5.002564
| 0.230769
| 0.064582
| 0.069195
| 0.099949
| 0.635572
| 0.635572
| 0.635572
| 0.635572
| 0.635572
| 0.635572
| 0
| 0.007197
| 0.205249
| 2,972
| 66
| 90
| 45.030303
| 0.818798
| 0
| 0
| 0.491228
| 0
| 0
| 0.033311
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.087719
| false
| 0
| 0.105263
| 0
| 0.280702
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
26dc6cf0e3afad0c2ebf41ec4b792f1e330897c5
| 2,963
|
py
|
Python
|
hvo_api/model/gps.py
|
wtollett-usgs/hvo_api
|
cdd39cb74d28a931cac4b843a71c5d8435f4620c
|
[
"CC0-1.0"
] | null | null | null |
hvo_api/model/gps.py
|
wtollett-usgs/hvo_api
|
cdd39cb74d28a931cac4b843a71c5d8435f4620c
|
[
"CC0-1.0"
] | null | null | null |
hvo_api/model/gps.py
|
wtollett-usgs/hvo_api
|
cdd39cb74d28a931cac4b843a71c5d8435f4620c
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from valverest.database import db7 as db
from sqlalchemy.ext.hybrid import hybrid_property
class Solution(db.Model):
__tablename__ = 'solutions'
__bind_key__ = 'gps'
sid = db.Column(db.Integer, primary_key=True)
cid = db.Column(db.Integer, primary_key=True)
x = db.Column(db.Float)
y = db.Column(db.Float)
z = db.Column(db.Float)
sxx = db.Column(db.Float)
syy = db.Column(db.Float)
szz = db.Column(db.Float)
sxy = db.Column(db.Float)
sxz = db.Column(db.Float)
syz = db.Column(db.Float)
# Relationships
source = db.relationship('GPSSource', uselist=False)
channel = db.relationship('GPSChannel', uselist=False)
class GPSSource(db.Model):
__tablename__ = 'sources'
__bind_key__ = 'gps'
sid = db.Column(db.Integer, db.ForeignKey('solutions.sid'),
primary_key=True)
name = db.Column(db.String(255))
hash = db.Column(db.String(32))
date0 = db.Column('j2ksec0', db.Float, primary_key=True)
date1 = db.Column('j2ksec1', db.Float, primary_key=True)
rid = db.Column(db.Integer)
# Relationships
rank = db.relationship('GPSRank', uselist=False)
@hybrid_property
def avgdate(self):
return (self.date0 + self.date1) / 2
class GPSChannel(db.Model):
__tablename__ = 'channels'
__bind_key__ = 'gps'
cid = db.Column(db.Integer, db.ForeignKey('solutions.cid'),
primary_key=True)
code = db.Column(db.String(16))
name = db.Column(db.String(255))
lon = db.Column(db.Float)
lat = db.Column(db.Float)
height = db.Column(db.Float)
ctid = db.Column(db.Integer)
class GPSRank(db.Model):
__tablename__ = 'ranks'
__bind_key__ = 'gps'
rid = db.Column(db.Integer, db.ForeignKey('sources.rid'), primary_key=True)
name = db.Column(db.String(24))
rank = db.Column(db.Integer)
class GPSDataPoint(object):
def __init__(self, t, r, x, y, z, sxx, syy, szz, sxy, sxz, syz, nlen):
self._t = t
self._r = r
self._x = x
self._y = y
self._z = z
self._sxx = sxx
self._syy = syy
self._szz = szz
self._sxy = sxy
self._sxz = sxz
self._syz = syz
self._nlen = nlen
@property
def t(self):
return self._t
@property
def r(self):
return self._r
@property
def x(self):
return self._x
@property
def y(self):
return self._y
@property
def z(self):
return self._z
@property
def sxx(self):
return self._sxx
@property
def syy(self):
return self._syy
@property
def szz(self):
return self._szz
@property
def sxy(self):
return self._sxy
@property
def sxz(self):
return self._sxz
@property
def syz(self):
return self._syz
@property
def nlen(self):
return self._nlen
| 22.44697
| 79
| 0.59838
| 397
| 2,963
| 4.282116
| 0.201511
| 0.127059
| 0.147059
| 0.105882
| 0.236471
| 0.168824
| 0.136471
| 0.075294
| 0
| 0
| 0
| 0.010703
| 0.274722
| 2,963
| 131
| 80
| 22.618321
| 0.780363
| 0.016537
| 0
| 0.20202
| 0
| 0
| 0.04055
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.141414
| false
| 0
| 0.020202
| 0.131313
| 0.727273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
26e0374db2378f11fc9bfc31927fa2a8ccdcf58c
| 1,995
|
py
|
Python
|
src/blog/templatetags/timediffer.py
|
codewithrakib/first-django-blog
|
339f5833025b0758f391c7c8e0979ca2eefd1b52
|
[
"MIT"
] | null | null | null |
src/blog/templatetags/timediffer.py
|
codewithrakib/first-django-blog
|
339f5833025b0758f391c7c8e0979ca2eefd1b52
|
[
"MIT"
] | 7
|
2021-03-19T02:00:00.000Z
|
2022-02-10T10:26:38.000Z
|
src/blog/templatetags/timediffer.py
|
codewithrakib/first-django-blog
|
339f5833025b0758f391c7c8e0979ca2eefd1b52
|
[
"MIT"
] | null | null | null |
from django import template
from datetime import datetime
from datetime import date
from datetime import time
from datetime import timedelta
register = template.Library()
@register.filter
def timediffer(now, posttime):
posttime = posttime.replace(tzinfo=None)
    timedif = now - posttime
timestr=""
if timedif.days >= 365:
gettime = (int)(timedif.days/365)
if gettime==1:
timestr = f"about {gettime} year ago"
else:
timestr = f"about {gettime} years ago"
elif timedif.days >= 30 and timedif.days < 365:
gettime = (int)(timedif.days/30)
if gettime==1:
timestr= f"about {gettime} month ago"
else:
timestr= f"about {gettime} months ago"
elif timedif.days>=7 and timedif.days < 30:
gettime = (int)(timedif.days/7)
if gettime==1:
timestr=f"about {gettime} week ago"
else:
timestr=f"about {gettime} weeks ago"
elif timedif.days>=1 and timedif.days < 7:
gettime = (int)(timedif.days)
if gettime==1:
timestr=f"about {gettime} day ago"
else:
timestr=f"about {gettime} days ago"
elif timedif.seconds>=3600 and timedif.days < 1:
gettime = (int)(timedif.seconds/3600)
if gettime==1:
timestr=f"about {gettime} hour ago"
else:
timestr=f"about {gettime} hours ago"
elif timedif.seconds>=60 and timedif.seconds < 3600:
gettime = (int)(timedif.seconds/60)
if gettime==1:
timestr = f"about {gettime} minute ago"
else:
timestr = f"about {gettime} minutes ago"
elif timedif.seconds>=1 and timedif.seconds < 60:
gettime = (int)(timedif.seconds)
if gettime==1:
timestr = f"about {gettime} second ago"
else:
timestr = f"about {gettime} seconds ago"
else:
timestr='now'
return timestr
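# Example: a difference of a little over three days yields "about 3 days ago",
# while a 90-second difference yields "about 1 minute ago".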
| 30.227273
| 56
| 0.578947
| 240
| 1,995
| 4.8125
| 0.208333
| 0.09697
| 0.157576
| 0.242424
| 0.406061
| 0.406061
| 0.242424
| 0
| 0
| 0
| 0
| 0.033724
| 0.316291
| 1,995
| 66
| 57
| 30.227273
| 0.81305
| 0
| 0
| 0.267857
| 0
| 0
| 0.177355
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017857
| false
| 0
| 0.089286
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f80409abd20022882a95d524c2584bb72123403a
| 533
|
py
|
Python
|
Perfect Squares.py
|
ngdeva99/Fulcrum
|
3a5c69005bbaf2a5aebe13d1907f13790210fb32
|
[
"MIT"
] | null | null | null |
Perfect Squares.py
|
ngdeva99/Fulcrum
|
3a5c69005bbaf2a5aebe13d1907f13790210fb32
|
[
"MIT"
] | null | null | null |
Perfect Squares.py
|
ngdeva99/Fulcrum
|
3a5c69005bbaf2a5aebe13d1907f13790210fb32
|
[
"MIT"
] | null | null | null |
from math import sqrt
class Solution:
def numSquares(self, n: int) -> int:
if n==0:
return 0
dp = [float('inf')]*(n+1)
dp[0] = 0
c = n
n = int(sqrt(n))
a = [i**2 for i in range(1,n+1)]
for i in range(1,len(dp)):
for j in a:
if i-j>=0:
dp[i] = min(dp[i-j]+1,dp[i])
print(dp)
        if dp[c]==float('inf'):
return -1
return dp[c]
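# Example: Solution().numSquares(12) == 3, since 12 = 4 + 4 + 4.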
| 22.208333
| 48
| 0.320826
| 73
| 533
| 2.342466
| 0.356164
| 0.052632
| 0.070175
| 0.128655
| 0.140351
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048387
| 0.534709
| 533
| 23
| 49
| 23.173913
| 0.641129
| 0
| 0
| 0
| 0
| 0
| 0.011257
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0
| 0
| 0.294118
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f80ccbd3e3b59f33892aafb3cc6b1f95f360dd40
| 1,631
|
py
|
Python
|
test_csv_write.py
|
wandyrandy/Groupme-Group-Stats-Report
|
25a59b715a7555540695639de81db390f09eb122
|
[
"MIT"
] | 2
|
2019-08-13T21:50:32.000Z
|
2019-08-14T00:49:29.000Z
|
test_csv_write.py
|
wandyrandy/Groupme-Group-Stats-Report
|
25a59b715a7555540695639de81db390f09eb122
|
[
"MIT"
] | null | null | null |
test_csv_write.py
|
wandyrandy/Groupme-Group-Stats-Report
|
25a59b715a7555540695639de81db390f09eb122
|
[
"MIT"
] | null | null | null |
import csv
import person
from random import randrange
headers = ['Name', 'Messages', 'Char Count', 'Likes Given', 'Likes Received', 'Image URL']
#tester code
people = ['bob', 'joe', 'gmo']
bob = person.Person(111, 'bob', 'www.bob.com', people)
joe = person.Person(222, 'joe', 'www.joe.com', people)
gmo = person.Person(333, 'gmo', 'www.gmo.com', people)
members = [bob, joe, gmo]
bob.msgs = randrange(40)
bob.likes_given = randrange(40)
bob.likes_received = randrange(40)
bob.chars = randrange(40)
bob.friends['gmo'] = randrange(40)
bob.friends['joe'] = randrange(40)
bob.friends['bob'] = randrange(40)
joe.msgs = randrange(40)
joe.likes_given = randrange(40)
joe.likes_received = randrange(40)
joe.chars = randrange(40)
joe.friends['gmo'] = randrange(40)
joe.friends['joe'] = randrange(40)
joe.friends['bob'] = randrange(40)
gmo.msgs = randrange(40)
gmo.likes_given = randrange(40)
gmo.likes_received = randrange(40)
gmo.chars = randrange(40)
gmo.friends['gmo'] = randrange(40)
gmo.friends['joe'] = randrange(40)
gmo.friends['bob'] = randrange(40)
# loop through the list of members and add their names to the headers
for member in members:
headers.append(member.name)
with open('raw_groupme_data.csv', 'w') as csv_file:
csv_writer = csv.writer(csv_file)
csv_writer.writerow(headers)
for member in members:
row = [member.name, member.msgs, member.chars, member.likes_given,
member.likes_received, member.image_url]
for friend in member.friends:
row.append(member.friends[friend])
csv_writer.writerow(row)
| 31.980392
| 91
| 0.676272
| 230
| 1,631
| 4.726087
| 0.252174
| 0.212512
| 0.090156
| 0.057958
| 0.045998
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037862
| 0.174126
| 1,631
| 51
| 92
| 31.980392
| 0.769117
| 0.048437
| 0
| 0.04878
| 0
| 0
| 0.103264
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.073171
| 0
| 0.073171
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f818e9acfc35ef6a4d51efdba0e1aa6dcf47703d
| 399
|
py
|
Python
|
examples/connect_to_wifi.py
|
flaiming/TechFurMeet-Micropython
|
00ff427429dfc186e33aa5e77bafe39eb820b854
|
[
"MIT"
] | 1
|
2018-01-19T12:05:32.000Z
|
2018-01-19T12:05:32.000Z
|
examples/connect_to_wifi.py
|
flaiming/TechFurMeet-Micropython
|
00ff427429dfc186e33aa5e77bafe39eb820b854
|
[
"MIT"
] | null | null | null |
examples/connect_to_wifi.py
|
flaiming/TechFurMeet-Micropython
|
00ff427429dfc186e33aa5e77bafe39eb820b854
|
[
"MIT"
] | null | null | null |
import network
import time
# deactivate AP
ap = network.WLAN(network.AP_IF)
ap.active(False)
# activate static network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
# connect to local WIFI
wlan.connect('TFM-Attendees')
# wait until connected
while not wlan.isconnected():
print('connecting...')
time.sleep(1)
print('Connected!')
print('Current network config:', wlan.ifconfig())
| 19
| 49
| 0.736842
| 55
| 399
| 5.309091
| 0.581818
| 0.113014
| 0.184932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002874
| 0.12782
| 399
| 20
| 50
| 19.95
| 0.836207
| 0.200501
| 0
| 0
| 0
| 0
| 0.187898
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.25
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f820475f96913877c23f5aa594fcc87cf676cc00
| 1,296
|
py
|
Python
|
src/api_status_monitor/consumer/database_connection.py
|
jjaakola/bang-a-gong
|
d30f889c18eeaff3d62d47cd02e93516e4d24dd7
|
[
"MIT"
] | null | null | null |
src/api_status_monitor/consumer/database_connection.py
|
jjaakola/bang-a-gong
|
d30f889c18eeaff3d62d47cd02e93516e4d24dd7
|
[
"MIT"
] | null | null | null |
src/api_status_monitor/consumer/database_connection.py
|
jjaakola/bang-a-gong
|
d30f889c18eeaff3d62d47cd02e93516e4d24dd7
|
[
"MIT"
] | null | null | null |
"""The database connection manager.
"""
import logging
import psycopg2
class DatabaseConnection():
"""Database connection manager.
"""
def __init__(self, host, port, user, dbname, password, sslmode):
self._conn = None
self._host = host
self._port = port
self._user = user
self._dbname = dbname
self._password = password
self._sslmode = "require" if sslmode else None
def get_connection(self):
if not self._conn or self._conn.closed:
try:
self._conn = psycopg2.connect(dbname=self._dbname,
user=self._user,
host=self._host,
port=self._port,
password=self._password,
sslmode=self._sslmode)
except Exception:
logging.error("Unable to connect to PostgreSQL database.", exc_info=1)
self._conn = None
return self._conn
def close(self):
try:
if self._conn:
self._conn.close()
except Exception:
logging.warning("Database connection close failed.")
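# Illustrative usage sketch (the connection parameters are placeholders):
# db = DatabaseConnection('localhost', 5432, 'monitor', 'statusdb', 'secret', sslmode=False)
# conn = db.get_connection()  # returns None if the connection attempt fails
# db.close()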
| 31.609756
| 86
| 0.500772
| 119
| 1,296
| 5.235294
| 0.352941
| 0.102729
| 0.080257
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004
| 0.421296
| 1,296
| 40
| 87
| 32.4
| 0.826667
| 0.050926
| 0
| 0.2
| 0
| 0
| 0.066502
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0.1
| 0.066667
| 0
| 0.233333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
f830e618925548200af372e7691ce927a36784c1
| 867
|
py
|
Python
|
registry/setup.py
|
fjrmoreews/bioshadock_client
|
26a1de6e130689b6385144253525c861d2a2199d
|
[
"Apache-2.0"
] | 1
|
2015-11-25T19:03:58.000Z
|
2015-11-25T19:03:58.000Z
|
registry/setup.py
|
fjrmoreews/bioshadock_client
|
26a1de6e130689b6385144253525c861d2a2199d
|
[
"Apache-2.0"
] | 2
|
2015-11-24T14:45:44.000Z
|
2015-11-26T15:28:30.000Z
|
registry/setup.py
|
fjrmoreews/bioshadock_client
|
26a1de6e130689b6385144253525c861d2a2199d
|
[
"Apache-2.0"
] | 1
|
2015-11-27T10:57:15.000Z
|
2015-11-27T10:57:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
# name of the lib
name='bioshadock_biotools',
# version
version='1.0.1',
packages=find_packages(),
author="Francois Moreews",
description="Import tool for biotools from Dockerfile",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"Development Status :: 5 - Production/Stable",
"License :: Apache 2.0",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Topic :: Communications",
],
scripts = [
'parseDockerFile.py',
'registryClient.py'
],
install_requires = [
'lxml',
'requests>=2.7.0'
],
license="Apache 2.0",
)
| 18.847826
| 59
| 0.575548
| 86
| 867
| 5.732558
| 0.709302
| 0.048682
| 0.10142
| 0.060852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.289504
| 867
| 45
| 60
| 19.266667
| 0.777597
| 0.076125
| 0
| 0.111111
| 0
| 0
| 0.451005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.074074
| 0
| 0.074074
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f83913edc4b000ba4986205d63145c52269b4655
| 1,252
|
py
|
Python
|
utils.py
|
rsoorajs/deecubes-telegram-bot
|
223710eb117c1333fefcff22bcf473e89e41c769
|
[
"MIT"
] | 2
|
2017-10-08T19:02:01.000Z
|
2020-05-16T21:55:18.000Z
|
utils.py
|
rsoorajs/deecubes-telegram-bot
|
223710eb117c1333fefcff22bcf473e89e41c769
|
[
"MIT"
] | null | null | null |
utils.py
|
rsoorajs/deecubes-telegram-bot
|
223710eb117c1333fefcff22bcf473e89e41c769
|
[
"MIT"
] | 3
|
2018-08-05T18:36:58.000Z
|
2020-05-16T21:55:19.000Z
|
import logging
from functools import wraps
from PIL import Image, ImageFont, ImageDraw
from config import LIST_ALLOWED_USERS
def restricted(func):
@wraps(func)
def wrapped(_, bot, update, *args, **kwargs):
user_id = update.effective_user.id
if LIST_ALLOWED_USERS:
if user_id not in LIST_ALLOWED_USERS:
logging.error("Unauthorized access denied for {}.".format(user_id))
return
return func(_, bot, update, *args, **kwargs)
return wrapped
def text2jpg(text, fullpath, color="#000", bgcolor="#FFF"):
font = ImageFont.load_default()
leftpadding = 3
rightpadding = 3
lines = text.split('\n')
char_width, line_height = font.getsize(text)
# TODO: Workaround. getsize is giving wrong width, so fix it to an approx number for now
char_width = 6
img_height = line_height * (len(lines) + 1)
char_count = 0
for line in lines:
count = len(line)
if count > char_count:
char_count = count
width = leftpadding + (char_width * char_count) + rightpadding
img = Image.new("RGBA", (width, img_height), bgcolor)
draw = ImageDraw.Draw(img)
y = 0
for line in lines:
if line:
draw.text((leftpadding, y), line, color, font=font)
y += line_height
img.save(fullpath)
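# Illustrative usage sketch (the output path is a placeholder). The image is
# created in RGBA mode, so a PNG target is the safe choice for fullpath here.
# text2jpg("hello\nworld", "/tmp/stats.png")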
| 25.04
| 90
| 0.683706
| 176
| 1,252
| 4.721591
| 0.448864
| 0.028881
| 0.057762
| 0.045728
| 0.036101
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.209265
| 1,252
| 49
| 91
| 25.55102
| 0.829293
| 0.06869
| 0
| 0.055556
| 0
| 0
| 0.041237
| 0
| 0
| 0
| 0
| 0.020408
| 0
| 1
| 0.083333
| false
| 0
| 0.111111
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f8433cd21799446edb00e1ccf569de9f138f3e9c
| 3,017
|
py
|
Python
|
learning/modules/resnet/resnet_conditional.py
|
esteng/guiding-multi-step
|
3f0db0ba70b5851cc83878f4ed48cf82342a2ddf
|
[
"BSD-2-Clause"
] | null | null | null |
learning/modules/resnet/resnet_conditional.py
|
esteng/guiding-multi-step
|
3f0db0ba70b5851cc83878f4ed48cf82342a2ddf
|
[
"BSD-2-Clause"
] | null | null | null |
learning/modules/resnet/resnet_conditional.py
|
esteng/guiding-multi-step
|
3f0db0ba70b5851cc83878f4ed48cf82342a2ddf
|
[
"BSD-2-Clause"
] | null | null | null |
import torch
from torch import nn as nn
from learning.modules.blocks import ResBlock, ResBlockConditional
class ResNetConditional(nn.Module):
def __init__(self, embed_size, channels, c_out):
super(ResNetConditional, self).__init__()
self.block1 = ResBlock(channels) # RF: 5x5
self.block1a = ResBlock(channels) # RF: 9x9
self.cblock1 = ResBlockConditional(embed_size, channels) # RF: 9x9
self.block2 = ResBlock(channels) # RF: 13x13
self.block2a = ResBlock(channels) # RF: 17x17
self.cblock2 = ResBlockConditional(embed_size, channels) # RF: 17x17
self.block3 = ResBlock(channels) # RF: 21x21
self.block3a = ResBlock(channels) # RF: 25x25
self.cblock3 = ResBlockConditional(embed_size, channels) # RF: 25x25
self.block4 = ResBlock(channels) # RF: 29x29
self.block4a = ResBlock(channels) # RF: 33x33
self.cblock4 = ResBlockConditional(embed_size, channels) # RF: 33x33
self.block5 = ResBlock(channels) # RF: 37x37
self.block5a = ResBlock(channels) # RF: 41x41
self.cblock5 = ResBlockConditional(embed_size, channels) # RF: 41x41
self.block6 = ResBlock(channels) # RF: 45x45
self.block6a = ResBlock(channels) # RF: 49x49
self.cblock6 = ResBlockConditional(embed_size, channels) # RF: 49x49
self.block7 = ResBlock(channels) # RF: 53x53
self.block7a = ResBlock(channels) # RF: 57x57
self.cblock7 = ResBlockConditional(embed_size, channels) # RF: 57x57
self.block8 = ResBlock(channels) # RF: 61x61
self.block8a = ResBlock(channels) # RF: 65x65
self.cblock8 = ResBlockConditional(embed_size, channels, c_out) # RF: 65x65
def init_weights(self):
for mod in self.modules():
if hasattr(mod, "init_weights") and mod is not self:
mod.init_weights()
def forward(self, inputs, contexts):
x = self.block1(inputs)
x = self.block1a(x)
x = self.cblock1(x, contexts)
x = self.block2(x)
x = self.block2a(x)
x = self.cblock2(x, contexts)
x = self.block3(x)
x = self.block3a(x)
x = self.cblock3(x, contexts)
x = self.block4(x)
x = self.block4a(x)
x = self.cblock4(x, contexts)
x = self.block5(x)
x = self.block5a(x)
x = self.cblock5(x, contexts)
x = self.block6(x)
x = self.block6a(x)
x = self.cblock6(x, contexts)
x = self.block7(x)
x = self.block7a(x)
x = self.cblock7(x, contexts)
x = self.block8(x)
x = self.block8a(x)
x = self.cblock8(x, contexts)
return x
| 46.415385
| 86
| 0.552536
| 326
| 3,017
| 5.046012
| 0.236196
| 0.072948
| 0.175076
| 0.175076
| 0.187234
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070265
| 0.349022
| 3,017
| 65
| 87
| 46.415385
| 0.767312
| 0.077229
| 0
| 0
| 0
| 0
| 0.004348
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.05
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f845c07a85b4945884e014911b73cc010e95c5c2
| 802
|
py
|
Python
|
problems/203_remove-linked-list-elements.py
|
okuda-seminar/review_leetcode
|
9774dbb85b836c3ebab4b24d77774ed05abb7a32
|
[
"MIT"
] | null | null | null |
problems/203_remove-linked-list-elements.py
|
okuda-seminar/review_leetcode
|
9774dbb85b836c3ebab4b24d77774ed05abb7a32
|
[
"MIT"
] | 170
|
2021-05-11T14:03:05.000Z
|
2021-11-30T14:22:52.000Z
|
problems/203_remove-linked-list-elements.py
|
ryuji0123/review_leetcode
|
9774dbb85b836c3ebab4b24d77774ed05abb7a32
|
[
"MIT"
] | null | null | null |
#
# @lc app=leetcode id=203 lang=python3
#
# [203] Remove Linked List Elements
#
# @lc code=start
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def removeElements(self, head: ListNode, val: int) -> ListNode:
"""
time: O(len(head))
space: O(len(head))
"""
if not head:
return None
current_node = head
while current_node and current_node.next:
if current_node.next.val == val:
current_node.next = current_node.next.next
else:
current_node = current_node.next
if head.val == val:
head = head.next
return head
# @lc code=end
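# Example: for head = 1 -> 2 -> 6 -> 3 -> 4 -> 5 -> 6 and val = 6,
# removeElements returns the list 1 -> 2 -> 3 -> 4 -> 5.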
| 23.588235
| 67
| 0.55985
| 99
| 802
| 4.414141
| 0.424242
| 0.201373
| 0.171625
| 0.077803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015009
| 0.335411
| 802
| 33
| 68
| 24.30303
| 0.804878
| 0.34788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f84ba2d7e5aa592c0ac62dbc711d229b2f13adeb
| 848
|
py
|
Python
|
vpc_hyp2/Createservers.py
|
dhanraj-vedanth/IaaS_VPC_CDN
|
262dbc7db63d5e76398dadc8015256fb37986e36
|
[
"MIT"
] | null | null | null |
vpc_hyp2/Createservers.py
|
dhanraj-vedanth/IaaS_VPC_CDN
|
262dbc7db63d5e76398dadc8015256fb37986e36
|
[
"MIT"
] | null | null | null |
vpc_hyp2/Createservers.py
|
dhanraj-vedanth/IaaS_VPC_CDN
|
262dbc7db63d5e76398dadc8015256fb37986e36
|
[
"MIT"
] | null | null | null |
import os
import sys
import json
import ipaddress
import paramiko
def func_createcont(br,r,IP):
print("\nMaking containers "+r)
print("sudo docker run -itd --cap-add=NET_ADMIN --name "+r+" main-vm")
os.system("sudo docker run -itd --cap-add=NET_ADMIN --name "+r+" main-vm")
print("ovs-docker add-port "+br+" brock "+r)
os.system("ovs-docker add-port "+br+" brock "+r)
print("sudo docker exec -it "+r+" ip route del default")
os.system("sudo docker exec -it "+r+" ip route del default")
print("sudo docker exec -it "+r+" dhclient brock")
os.system("sudo docker exec -it "+r+" dhclient brock")
print("sudo docker exec -it "+r+" ip addr add "+IP+"/24 dev brock")
os.system("sudo docker exec -it "+r+" ip addr add "+IP+"/24 dev brock")
br=sys.argv[1]
r=sys.argv[2]
IP=sys.argv[3]
func_createcont(br,r,IP)
| 33.92
| 78
| 0.650943
| 146
| 848
| 3.753425
| 0.294521
| 0.145985
| 0.153285
| 0.175182
| 0.75
| 0.680657
| 0.671533
| 0.49635
| 0.412409
| 0.288321
| 0
| 0.009972
| 0.17217
| 848
| 24
| 79
| 35.333333
| 0.770655
| 0
| 0
| 0
| 0
| 0
| 0.514151
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.238095
| 0
| 0.285714
| 0.285714
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f858848401df27fd04f2c1792b618ab879328af0
| 1,112
|
py
|
Python
|
siqbal/siqbal/doctype/item_label/item_label.py
|
smehata/siqbal
|
8b6a21fb63c050237593c49757065198c0e2c54a
|
[
"MIT"
] | 1
|
2021-08-07T12:48:02.000Z
|
2021-08-07T12:48:02.000Z
|
siqbal/siqbal/doctype/item_label/item_label.py
|
smehata/siqbal
|
8b6a21fb63c050237593c49757065198c0e2c54a
|
[
"MIT"
] | null | null | null |
siqbal/siqbal/doctype/item_label/item_label.py
|
smehata/siqbal
|
8b6a21fb63c050237593c49757065198c0e2c54a
|
[
"MIT"
] | 4
|
2021-01-16T06:14:58.000Z
|
2022-02-07T06:36:41.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, RC and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class ItemLabel(Document):
def on_submit(self):
for d in self.get("items"):
			# Check for Price List: if it doesn't exist, create a new one. # Get Item Price name
item_price_name = frappe.db.get_value("Item Price", {"item_code": d.item_code,"price_list": d.price_list},"name")
if not item_price_name:
self.make_item_price(d.item_code,d.price_list,d.item_price)
else :
old_item_price = frappe.db.get_value("Item Price", {"name": item_price_name},"price_list_rate")
				# update Item Price if it's available and the rate has changed
if(old_item_price != d.item_price):
frappe.db.set_value("Item Price", item_price_name, "price_list_rate", d.item_price)
def make_item_price(self,item, price_list_name, item_price):
frappe.get_doc({
"doctype": "Item Price",
"price_list": price_list_name,
"item_code": item,
"price_list_rate": item_price
}).insert(ignore_permissions=True)
| 37.066667
| 116
| 0.732914
| 177
| 1,112
| 4.322034
| 0.361582
| 0.247059
| 0.101961
| 0.044444
| 0.172549
| 0.172549
| 0
| 0
| 0
| 0
| 0
| 0.005269
| 0.146583
| 1,112
| 29
| 117
| 38.344828
| 0.800843
| 0.207734
| 0
| 0
| 0
| 0
| 0.163616
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.15
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f8683ceaf922240bb0a9b5391ea9deb94effc25d
| 253
|
py
|
Python
|
programming/python_in_high_performance_computing/cyt_modules/cyt_setup.py
|
carlosevmoura/courses-notes
|
dc938625dd79267f9a262e7e6939205f63dda885
|
[
"MIT"
] | null | null | null |
programming/python_in_high_performance_computing/cyt_modules/cyt_setup.py
|
carlosevmoura/courses-notes
|
dc938625dd79267f9a262e7e6939205f63dda885
|
[
"MIT"
] | null | null | null |
programming/python_in_high_performance_computing/cyt_modules/cyt_setup.py
|
carlosevmoura/courses-notes
|
dc938625dd79267f9a262e7e6939205f63dda885
|
[
"MIT"
] | null | null | null |
from distutils.core import Extension, setup
from Cython.Build import cythonize
from Cython.Compiler import Options
Options.docstrings = False
ext = Extension(name="cyt_module", sources=["cyt_module.pyx"])
setup(
ext_modules = cythonize(ext),
)
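# Assumed standard Cython workflow (not stated in this file): build the
# extension in place and then import it, e.g.
#   python cyt_setup.py build_ext --inplace
#   python -c "import cyt_module"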
| 18.071429
| 62
| 0.766798
| 33
| 253
| 5.787879
| 0.606061
| 0.104712
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134387
| 253
| 13
| 63
| 19.461538
| 0.872146
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
f873731d39e77de62eb053df48244e290afd54de
| 1,038
|
py
|
Python
|
py/LSS/imaging/veto_masks/lrg/lrg_wise_mask_v1.py
|
echaussidon/LSS
|
205ce48a288acacbd41358e6d0215f4aff355049
|
[
"BSD-3-Clause"
] | null | null | null |
py/LSS/imaging/veto_masks/lrg/lrg_wise_mask_v1.py
|
echaussidon/LSS
|
205ce48a288acacbd41358e6d0215f4aff355049
|
[
"BSD-3-Clause"
] | null | null | null |
py/LSS/imaging/veto_masks/lrg/lrg_wise_mask_v1.py
|
echaussidon/LSS
|
205ce48a288acacbd41358e6d0215f4aff355049
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import division, print_function
import sys, os, glob, time, warnings, gc
# import matplotlib.pyplot as plt
import numpy as np
from astropy.table import Table, vstack, hstack
import fitsio
from astropy.io import fits
from scipy.interpolate import interp1d
output_path = '/global/cfs/cdirs/desi/users/rongpu/desi_mask/w1_bright-2mass-lrg_mask_v1.fits'
# WISE mask
w1_mags = [0, 0.5, 1, 1.5, 2, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0]
w1_radii = [600, 600, 550, 500, 475, 425, 400, 400, 390, 392.5, 395, 370, 360, 330, 275, 240, 210, 165, 100, 75, 60]
w1_max_mag = 10.0
f_radius = interp1d(w1_mags, w1_radii, bounds_error=False, fill_value='extrapolate')
wise_path = '/global/cfs/cdirs/desi/users/rongpu/desi_mask/w1_bright-2mass-13.3-dr9.fits'
wise = Table(fitsio.read(wise_path))
# print(len(wise))
wise['w1ab'] = np.array(wise['W1MPRO']) + 2.699
mask = wise['w1ab']<w1_max_mag
wise['radius'] = 0.
wise['radius'][mask] = f_radius(wise['w1ab'][mask])
wise.write(output_path)
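# A small self-contained sketch of the same interp1d usage, with made-up knots
# rather than the mask values above: tabulated points are returned exactly and
# intermediate magnitudes are linearly interpolated.
demo_f = interp1d([0.0, 5.0, 10.0], [600.0, 395.0, 60.0],
                  bounds_error=False, fill_value='extrapolate')
print(float(demo_f(5.0)), float(demo_f(7.5)))  # 395.0 at a knot, 227.5 in between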
| 33.483871
| 116
| 0.706166
| 198
| 1,038
| 3.565657
| 0.474747
| 0.025496
| 0.036827
| 0.050992
| 0.152975
| 0.152975
| 0.152975
| 0.152975
| 0.152975
| 0.152975
| 0
| 0.144114
| 0.124277
| 1,038
| 30
| 117
| 34.6
| 0.632563
| 0.055877
| 0
| 0
| 0
| 0.105263
| 0.19877
| 0.156762
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.368421
| 0
| 0.368421
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
f875e138fd658884c3bfbd92197a369b04338ea0
| 4,590
|
py
|
Python
|
cembot/languages/EN.py
|
niksart/cembot
|
99ec3067bde5b8b72053dd18caa18742afba6a5e
|
[
"MIT"
] | null | null | null |
cembot/languages/EN.py
|
niksart/cembot
|
99ec3067bde5b8b72053dd18caa18742afba6a5e
|
[
"MIT"
] | 15
|
2018-08-30T13:56:27.000Z
|
2021-07-21T08:58:03.000Z
|
cembot/languages/EN.py
|
niksart/cembot
|
99ec3067bde5b8b72053dd18caa18742afba6a5e
|
[
"MIT"
] | null | null | null |
# Support for the English (EN) language
def missing_translation(tr_id):
return "MISSING TRANSLATION FOR STRING ID '" + str(tr_id) + "'"
helper_commands = {
"AUTHORIZE": "Usage:\n/authorize @<username>\n/authorize <user id>",
"DEAUTHORIZE": "Usage:\n/deauthorize @<username>\n/deauthorize <user id>",
"GIVEN": "Usage:\n/given <amount> @<username> <description>",
"SPENT": "Usage:\n/spent <amount> <description>.\nPayees are all the members of the group, including the payer.",
"MYID": "Usage: /myid\nshow your user id, useful if you have no username",
"START": "Show the initial message",
"LAST_GROUP_EXPENSES": "See the last expenses in a group. \n"
"Usage:\n"
"▪️ /last_expenses (show max 5 expenses)\n"
"▪️ /last_expenses <n max expenses to show>",
"LAST_CHARGES": "Use this command in private chat to see the last charges on your cembot account. \n"
"Usage:\n"
"▪️ /last_charges (show max 5 charges)\n"
"▪️ /last_charges <n max charges to show>",
"LAST_LOANS": "Use this command in private chat to see the last loans you did \n"
"Usage:\n"
"▪️ /last_loans (show max 5 loans)\n"
"▪️ /last loans <n max loans to show>"
}
info = {
"start": missing_translation("start"),
"guide": missing_translation("start"),
"introduced_in_group": "Hello everyone!\nI'm cembot, and I'll help you administrating your expenses!\n"
"Each member of this group now should introduce yourself. "
"People added after this message can avoid to introduce themselves.\n"
"Do it with the command /hereIam",
"each_member_introduced": missing_translation("each_member_introduced"),
"person_missing": "1 person is missing.",
"people_missing": " people are missing.",
"transaction_succeed": "Transaction added successfully!",
"authorized_confirm(user)": "User @%s has been authorized.",
"deauthorized_confirm(user)": "The authorization of user @%s has been revoked.",
"your_id_is(id)": "Your Telegram id is %s. You can add in Telegram settings an username and use cembot more easily.",
"balance_with_other_user(user,balance)": "Your balance with the user %s is %s",
"header_balance_credit": "📗 Credits\n",
"header_balance_debit": "📕 Debits\n",
"commands": missing_translation("commands"),
"these_are_the_last_group_expenses": missing_translation("these_are_the_last_group_expenses"),
"these_are_the_last_individual_charges": missing_translation("these_are_the_last_individual_charges"),
"these_are_the_last_group_charges": missing_translation("these_are_the_last_group_charges"),
"no_charges_yet": missing_translation("no_charges_yet"),
"these_are_the_last_individual_loans": missing_translation("these_are_the_last_individual_loans"),
"these_are_the_last_group_loans": missing_translation("these_are_the_last_group_loans")
}
error = {
"command_unavailable_for_private": "For using this command open a private chat with @en_cembot.",
"command_unavailable_for_group": "For using this command add @en_cembot in a group.",
"amount_money_not_valid": "Amount of money not valid.",
"waiting_for_all_users": "Someone did not present themselves yet.\n"
"Present yourself with /hereIam before adding expenses.",
"lack_of_authorization(user)": "The user @%s has not authorized you for charging expenses.",
"user_unregistered(user)": "The user @%s that you want to add as a payee is not registered on our system",
"can't_deauthorize_cause_not_authorized_yet": "You have not already authorized this user. You can't deauthorize it.",
"have_authorized_yet_this_user": "You have already authorized this user.",
"maybe_you_wrote_an_username_instead_id": "This is not a numeric id. If you intended to write an username write it with a @ at the beginning.",
"insert_a_correct_number": "Insert a correct number and retry"
}
# commands
private_commands = {
"start": "START",
"commands": "COMMANDS",
"authorize": "AUTHORIZE",
"revoke": "DEAUTHORIZE",
"given": "GIVEN",
"myid": "MYID",
"balance": "BALANCE",
"last_charges": "LAST_CHARGES",
"last_loans": "LAST_LOANS",
"guide": "GUIDE"
}
group_commands = {
"spent": "SPENT",
"spent@en_cembot": "SPENT", # version with @[language]_cembot
"hereIam": "PRESENTATION",
"hereIam@en_cembot": "PRESENTATION", # version with @[language]_cembot
"last_expenses": "LAST_GROUP_EXPENSES",
"last_expenses@en_cembot": "LAST_GROUP_EXPENSES", # version with @[language]_cembot
}
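# A minimal lookup sketch (cembot's real dispatcher is not part of this file):
# the tables above are plain dicts, so help text can fall back to the
# missing-translation marker.
def help_text(command_id):
    return helper_commands.get(command_id, missing_translation(command_id))
# help_text("SPENT") -> usage string for /spent
# help_text("UNKNOWN") -> "MISSING TRANSLATION FOR STRING ID 'UNKNOWN'"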
| 49.891304
| 144
| 0.70305
| 628
| 4,590
| 4.921975
| 0.265924
| 0.02944
| 0.035587
| 0.048528
| 0.158848
| 0.146231
| 0.097703
| 0.025235
| 0.025235
| 0.025235
| 0
| 0.001053
| 0.172331
| 4,590
| 91
| 145
| 50.43956
| 0.808897
| 0.030065
| 0
| 0.0375
| 0
| 0.0375
| 0.735268
| 0.194332
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0125
| false
| 0
| 0
| 0.0125
| 0.025
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f885cb85cd328a59b1d3f0d46e987b871f1a5d6d
| 1,977
|
py
|
Python
|
apiser/10-grpc/src/utils/tools/zemail.py
|
hyhlinux/demo_vue
|
cf61d0ba21cce93b04951076c8c23c0fe693bb5b
|
[
"Apache-2.0"
] | null | null | null |
apiser/10-grpc/src/utils/tools/zemail.py
|
hyhlinux/demo_vue
|
cf61d0ba21cce93b04951076c8c23c0fe693bb5b
|
[
"Apache-2.0"
] | 2
|
2022-02-10T12:00:22.000Z
|
2022-03-02T02:31:40.000Z
|
apiser/10-grpc/src/utils/tools/zemail.py
|
hyhlinux/demo_vue
|
cf61d0ba21cce93b04951076c8c23c0fe693bb5b
|
[
"Apache-2.0"
] | null | null | null |
import smtplib
import os
from email.mime.text import MIMEText
from email.utils import formataddr
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
try:
from src.config import CONFIG
except ImportError:
class CONFIG:
EMAIL = {
"user": os.getenv('EMAIL_USER', ""),
"passwd": os.getenv("EMAIL_USER_PASSWD", ""),
}
class SendEmail(object):
hosts = "smtp.exmail.qq.com"
host_port = 465
def __init__(self, user_email, user_pwd, tls=False):
print("conf:", user_email, user_pwd)
self.user = user_email
self.user_pass = user_pwd
self.server = smtplib.SMTP_SSL(host=SendEmail.hosts, port=SendEmail.host_port)
self.server.debuglevel = 1
self.server.ehlo()
if tls:
self.server.starttls()
def login(self):
self.server = smtplib.SMTP_SSL(host=SendEmail.hosts, port=SendEmail.host_port)
self.server.ehlo()
self.server.login(self.user, self.user_pass)
def sendEmail(self, to, msg):
try:
msg = self.body(msg=msg)
self.login() # re-establish the SMTP session with the sender's account and password
self.server.sendmail(from_addr=self.user, to_addrs=to, msg=msg) # sender address, recipient address, and the message to send
self.logout() # close the connection
except Exception as e: # if anything in the try block fails, fall through to the error handling below
print(e)
def body(self, subject=None, msg=None):
msg = MIMEText(msg, 'html', 'utf-8')
msg['From'] = formataddr(["FromTES", self.user]) # the sender's display name and the sender's email address
if not subject:
subject = '邮件激活' # default subject: "Email activation"
msg['Subject'] = subject
return msg.as_string()
def logout(self):
self.server.quit()
Zemail = SendEmail(CONFIG.EMAIL.get("user"), CONFIG.EMAIL.get("passwd"))
def main():
Zemail.sendEmail("[email protected]", "")
if __name__ == '__main__':
main()
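# Usage note inferred from the code above (not original documentation): the
# module-level Zemail instance opens an SMTP_SSL connection to
# smtp.exmail.qq.com at import time; when src.config.CONFIG is unavailable,
# credentials fall back to the EMAIL_USER / EMAIL_USER_PASSWD environment
# variables, e.g. (placeholder values):
#   EMAIL_USER=sender@example.com EMAIL_USER_PASSWD=secret python zemail.py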
| 29.954545
| 107
| 0.630754
| 250
| 1,977
| 4.864
| 0.352
| 0.074013
| 0.042763
| 0.027961
| 0.223684
| 0.185855
| 0.185855
| 0.185855
| 0.120066
| 0.120066
| 0
| 0.010081
| 0.247344
| 1,977
| 65
| 108
| 30.415385
| 0.807124
| 0.054628
| 0
| 0.148148
| 0
| 0
| 0.067633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0.074074
| 0.185185
| 0
| 0.407407
| 0.037037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
f891f4ca2c23bac0817312243666f8fd196ddfcf
| 9,970
|
py
|
Python
|
selinum_basics.py
|
elithaxxor/craiglist_scraper
|
db35d06004e306229cd10d7678574763cf48c625
|
[
"MIT"
] | null | null | null |
selinum_basics.py
|
elithaxxor/craiglist_scraper
|
db35d06004e306229cd10d7678574763cf48c625
|
[
"MIT"
] | null | null | null |
selinum_basics.py
|
elithaxxor/craiglist_scraper
|
db35d06004e306229cd10d7678574763cf48c625
|
[
"MIT"
] | null | null | null |
import os
import re
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver import Chrome
from selenium.webdriver.support.expected_conditions import presence_of_element_located
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
## ORIGINAL CODE ###
# OS = os.name
# # s.environ['PATH'] += '/Users/macbook/Documents/CS/PROJECT/AutoDownloader/TEST_DOWNLOADS/fileexteniontest.torrenttorrent.torrent'
# driver = webdriver.Chrome(r'/Users/macbook/Documents/CS/PROJECT/AutoDownloader/TEST_DOWNLOADS/fileexteniontest.torrenttorrent.torrent/chromedriver')
# driver.get('https://1337x.to/')
## To Load Extensions::
try:
OS = os.name
chrome_options = Options()
chrome_options.add_extension('/Users/macbook/Documents/CS/PROJECT/AutoDownloader/TEST_DOWNLOADS/Selenium_Project/ad_blocker.crx')
driver = webdriver.Chrome(options=chrome_options, executable_path= r'/Users/macbook/Documents/CS/PROJECT/AutoDownloader/TEST_DOWNLOADS/fileexteniontest.torrenttorrent.torrent/chromedriver')
time.sleep(2)
driver.get('https://1337x.to/')
driver.implicitly_wait(25) ### no need to call more than once
print(OS)
print(driver)
#print(driver.text)
except Exception as e:
print('ERROR IN PARSING CHROME EXTENSION', str(e))
try:
search_box = driver.find_element_by_id('autocomplete')
print(search_box.text)
search_box.click()
search_box.send_keys('chopper')
click_search_box = driver.find_element_by_class_name('flaticon-search')
#click_search_box.click()
#click_search_box.send_keys(Keys.ENTER)
search_box.send_keys(Keys.ENTER)
#driver.find_element_by_xpath("html/xxxxx").send_keys('keys.ENTER')
except Exception as e:
print('Element not found CANNOT FIND SEARCH BOX ', str(e))
try:
search_box01 = driver.find_element_by_id('autocomplete')
print(search_box01.text)
search_box01.click()
search_box01.send_keys(Keys.CONTROL, "a")
search_box01.clear()
search_box01.send_keys('the titanic')
search_box01.send_keys(Keys.ENTER)
except Exception as e:
print('Element not found 2nd search', str(e))
### IMPLEMENT EXPLICIT WAIT
## SINCE THE WEBPAGE MAY TAKE LONG TO LOAD, AND TIME TO PARSE, SET UP AN EXPLICIT WAIT--> THIS WILL WAIT UNTIL THE DEFINED THING IS LOADED
## SET UP LOOP TO ITERATE THROUGH LIST OF ELEMENTS
try:
body = WebDriverWait(driver, 15).until(
EC.presence_of_element_located((By.CLASS_NAME, 'table-list-wrap'))
#EC.presence_of_all_elements_located((by.CLASS, 'table-list table table-responsive table-striped')) ##
)
print(body.text)
print(),print()
print('1111111111')
href_link = body.find_element_by_xpath("/html/body/main/div/div/div/div[2]/div[1]/table/tbody/tr[1]/td[1]")
print(href_link.text)
except Exception as e:
print('Element not found body search', str(e))
try:
click_link = driver.find_element_by_link_text('The Titanic Secret by Clive Cussler, Jack Du Brul EPUB')
print(click_link.text)
click_link.click()
except Exception as e:
print('Element not found click test', str(e))
try:
# magnet = driver.find_element
magnet_pull = WebDriverWait(driver, 15).until(
EC.presence_of_element_located((By.CLASS_NAME, "l4702248fa49fbaf25efd33c5904b4b3175b29571 l0e850ee5d16878d261dd01e2486970eda4fb2b0c l8680f3a1872d2d50e0908459a4bfa4dc04f0e610"))
)
print('magnetpull info')
print(magnet_pull.text)
magnet_link = driver.find_element_by_xpath("/html/body/main/div/div/div/div[2]/div[1]/ul[1]/li[1]/a")
print(magnet_link.text)
magnet_link.click()
except Exception as e:
print('MAGNET PULL ERROR', str(e))
driver.quit()
###### GOOD CODE ######
##### TO LOOP THROUGH A LIST WHILE IN IMPLICIT WAIT
# sm_table = body.find_element_by_class_name('"table-list table table-responsive table-striped"')
# # sm_table = body.find_element_by_class_name('coll-1 name')
# #sm_table = body.find_element_by_xpath("/html/body/main/div/div/div/div[2]/div[1]/table/tbody/tr[1]/td[1]")
#
# for cell in sm_table:
# href_link = cell.find_element_by_xpath("/html/body/main/div/div/div/div[2]/div[1]/table/tbody/tr[1]/td[1]")
# print(href_link.text)
## ORIGINAL CODE ###
# OS = os.name
# # s.environ['PATH'] += '/Users/macbook/Documents/CS/PROJECT/AutoDownloader/TEST_DOWNLOADS/fileexteniontest.torrenttorrent.torrent'
# driver = webdriver.Chrome(r'/Users/macbook/Documents/CS/PROJECT/AutoDownloader/TEST_DOWNLOADS/fileexteniontest.torrenttorrent.torrent/chromedriver')
# driver.get('https://1337x.to/')
#################### EXPLICIT WAIT ###########################
###### USE WHEN DOWNLOAD COMPLETES ######### (23:00)
#### use when you want to wait some time for execution
## explicit wait -- waits until condition is returned true.
## driver, 30 --> how long to wait till true
# ## use body class to find element
# ## nest elements in a tuple
# print(f"my_element")
# WebDriverWait(driver, 30).until(
# EC.text_to_be_present_in_element(
# (By.CLASS_NAME, 'progress-label'),## element filtration (class name, class name value as a tuple)
# 'complete' ## expected text as a string
#
# )
#
# )
# my_element00 = driver.find_element_by_class_name('') ## <--- pass in class value #-> class styling method
# print(my_element00)
#
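# A corrected sketch of the explicit-wait pattern described above, using the
# expected-condition name as spelled in Selenium's API; 'progress-label' comes
# from the note above, everything else is assumed:
#   WebDriverWait(driver, 30).until(
#       EC.text_to_be_present_in_element((By.CLASS_NAME, 'progress-label'), 'complete')
#   )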
# #### DROP DOWN CLASSES FOR MAGNET / TORRENT DOWNLOAD ##
# <ul class="lfa750b508ad7d04e3fc96bae2ea94a5d121e6607 lcafae12a818cf41a5873ad374b98e79512c946c6">
# <li><a class="l4702248fa49fbaf25efd33c5904b4b3175b29571 l0e850ee5d16878d261dd01e2486970eda4fb2b0c l8680f3a1872d2d50e0908459a4bfa4dc04f0e610" href="magnet:?xt=urn:btih:F5BC20E9AA709CFC32BE63B2F6BEE56882EB7BD2&dn=The+Titanic+Secret+by+Clive+Cussler%2C+Jack+Du+Brul+EPUB&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969%2Fannounce&tr=udp%3A%2F%2F9.rarbg.to%3A2710%2Fannounce&tr=udp%3A%2F%2Fexodus.desync.com%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.uw0.xyz%3A6969%2Fannounce&tr=udp%3A%2F%2Fopen.stealth.si%3A80%2Fannounce&tr=udp%3A%2F%2Ftracker.tiny-vps.com%3A6969%2Fannounce&tr=udp%3A%2F%2Fopen.demonii.si%3A1337%2Fannounce&tr=udp%3A%2F%2Ftracker.nibba.trade%3A1337%2Fannounce&tr=udp%3A%2F%2Fopentracker.sktorrent.org%3A6969%2Fannounce&tr=udp%3A%2F%2Fexplodie.org%3A6969%2Fannounce&tr=udp%3A%2F%2Fbt.xxx-tracker.com%3A2710%2Fannounce&tr=udp%3A%2F%2Fzephir.monocul.us%3A6969%2Fannounce&tr=udp%3A%2F%2Famigacity.xyz%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.zer0day.to%3A1337%2Fannounce&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969%2Fannounce&tr=udp%3A%2F%2Fcoppersurfer.tk%3A6969%2Fannounce" onclick="javascript: count(this);"><span class="icon"><i class="flaticon-ld08a4206c278863eddc1bf813faa024ef55ce0ef"></i></span>Magnet Download</a> </li>
# <li class="dropdown">
# <a data-toggle="dropdown" class="l4702248fa49fbaf25efd33c5904b4b3175b29571 l0e850ee5d16878d261dd01e2486970eda4fb2b0c le41399670fcf7cac9ad72cbf1af20d76a1fa16ad" onclick="javascript: count(this);" href="#"><span class="icon"><i class="flaticon-le9f40194aef2ed76d8d0f7f1be7fe5aad6fce5e6"></i></span>Torrent Download</a>
# <ul class="dropdown-menu" aria-labelledby="dropdownMenu1">
# <li><a class="l4702248fa49fbaf25efd33c5904b4b3175b29571 l0e850ee5d16878d261dd01e2486970eda4fb2b0c l13bf8e2d22d06c362f67b795686b16d022e80098" target="_blank" href="http://itorrents.org/torrent/F5BC20E9AA709CFC32BE63B2F6BEE56882EB7BD2.torrent"><span class="icon"><i class="flaticon-lbebff891414215bfc65d51afbd7677e45be19fad"></i></span>ITORRENTS MIRROR</a> </li>
# <li><a class="l4702248fa49fbaf25efd33c5904b4b3175b29571 l0e850ee5d16878d261dd01e2486970eda4fb2b0c l13bf8e2d22d06c362f67b795686b16d022e80098" target="_blank" href="http://torrage.info/torrent.php?h=F5BC20E9AA709CFC32BE63B2F6BEE56882EB7BD2"><span class="icon"><i class="flaticon-lbebff891414215bfc65d51afbd7677e45be19fad"></i></span>TORRAGE MIRROR</a></li>
# <li><a class="l4702248fa49fbaf25efd33c5904b4b3175b29571 l0e850ee5d16878d261dd01e2486970eda4fb2b0c l13bf8e2d22d06c362f67b795686b16d022e80098" target="_blank" href="http://btcache.me/torrent/F5BC20E9AA709CFC32BE63B2F6BEE56882EB7BD2"><span class="icon"><i class="flaticon-lbebff891414215bfc65d51afbd7677e45be19fad"></i></span>BTCACHE MIRROR</a></li>
# <li><a class="l4702248fa49fbaf25efd33c5904b4b3175b29571 l0e850ee5d16878d261dd01e2486970eda4fb2b0c l8680f3a1872d2d50e0908459a4bfa4dc04f0e610" href="magnet:?xt=urn:btih:F5BC20E9AA709CFC32BE63B2F6BEE56882EB7BD2&dn=The+Titanic+Secret+by+Clive+Cussler%2C+Jack+Du+Brul+EPUB&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969%2Fannounce&tr=udp%3A%2F%2F9.rarbg.to%3A2710%2Fannounce&tr=udp%3A%2F%2Fexodus.desync.com%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.uw0.xyz%3A6969%2Fannounce&tr=udp%3A%2F%2Fopen.stealth.si%3A80%2Fannounce&tr=udp%3A%2F%2Ftracker.tiny-vps.com%3A6969%2Fannounce&tr=udp%3A%2F%2Fopen.demonii.si%3A1337%2Fannounce&tr=udp%3A%2F%2Ftracker.nibba.trade%3A1337%2Fannounce&tr=udp%3A%2F%2Fopentracker.sktorrent.org%3A6969%2Fannounce&tr=udp%3A%2F%2Fexplodie.org%3A6969%2Fannounce&tr=udp%3A%2F%2Fbt.xxx-tracker.com%3A2710%2Fannounce&tr=udp%3A%2F%2Fzephir.monocul.us%3A6969%2Fannounce&tr=udp%3A%2F%2Famigacity.xyz%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.zer0day.to%3A1337%2Fannounce&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969%2Fannounce&tr=udp%3A%2F%2Fcoppersurfer.tk%3A6969%2Fannounce"><span class="icon"><i class="flaticon-ld08a4206c278863eddc1bf813faa024ef55ce0ef"></i></span>None Working? Use Magnet</a></li>
#
| 57.298851
| 1,381
| 0.768907
| 1,326
| 9,970
| 5.686275
| 0.21267
| 0.022546
| 0.036074
| 0.045093
| 0.665252
| 0.649602
| 0.616313
| 0.587268
| 0.548011
| 0.518966
| 0
| 0.133326
| 0.090471
| 9,970
| 173
| 1,382
| 57.630058
| 0.698169
| 0.671214
| 0
| 0.194444
| 0
| 0.041667
| 0.259845
| 0.147837
| 0
| 0
| 0
| 0.00578
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f89ade1e452186e4d101ccde6adaccc57996d66d
| 646
|
py
|
Python
|
Automate_Whatsapp_Sending_Text.py
|
IshvinaKapoor/Automate-WhatsApp
|
f499db0540c56b74152a368af1fa361ecea69806
|
[
"MIT"
] | null | null | null |
Automate_Whatsapp_Sending_Text.py
|
IshvinaKapoor/Automate-WhatsApp
|
f499db0540c56b74152a368af1fa361ecea69806
|
[
"MIT"
] | null | null | null |
Automate_Whatsapp_Sending_Text.py
|
IshvinaKapoor/Automate-WhatsApp
|
f499db0540c56b74152a368af1fa361ecea69806
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Automate WhatsApp - Sending WhatsApp message
@author: DELL Ishvina Kapoor
"""
#importing the necessary modules
import pywhatkit as pkt
import getpass as gp
#displaying a welcome message
print("Automating Whatsapp!")
#capturing the target phone number from the user
phone_num = gp.getpass(prompt = 'Enter the phone number(with country code) : ', stream = None)
#capture the message
message = "Hi IK this side"
#call the method
#the time is in 24 hr format
pkt.sendwhatmsg(phone_num, message, 22, 33)
#will be displayed once whatsapp is automated
print("Delivered to the target user")
| 23.925926
| 96
| 0.716718
| 91
| 646
| 5.065934
| 0.692308
| 0.039046
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0.198142
| 646
| 27
| 97
| 23.925926
| 0.876448
| 0.47678
| 0
| 0
| 0
| 0
| 0.35906
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.285714
| 0.285714
| 0
| 0.285714
| 0.285714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
f8a65542e1ebb18eabea4f393380c912f8314bfc
| 696
|
py
|
Python
|
network/topo-custom.py
|
kstough/pox
|
152625fcd40fc5ddfce87b7632fd40777507205c
|
[
"Apache-2.0"
] | null | null | null |
network/topo-custom.py
|
kstough/pox
|
152625fcd40fc5ddfce87b7632fd40777507205c
|
[
"Apache-2.0"
] | null | null | null |
network/topo-custom.py
|
kstough/pox
|
152625fcd40fc5ddfce87b7632fd40777507205c
|
[
"Apache-2.0"
] | null | null | null |
"""Custom topology example
s7 ---- s8 ---- s9
/ \ / \ / \
h1 h2 h3 h4 h5 h6
"""
from mininet.topo import Topo
print('Loading MyTopo')
class MyTopo(Topo):
"Simple topology example."
def __init__(self):
Topo.__init__(self)
# Add hosts and switches
h1, h2, h3, h4, h5, h6 = (self.addHost('h' + str(i + 1)) for i in range(6))
s7, s8, s9 = (self.addSwitch('s' + str(i + 7)) for i in range(3))
# Add links
self.addLink(h1, s7)
self.addLink(h2, s7)
self.addLink(s7, s8)
self.addLink(h3, s8)
self.addLink(h4, s8)
self.addLink(s8, s9)
self.addLink(h5, s9)
self.addLink(h6, s9)
topos = {'mytopo': (lambda: MyTopo())}
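# Assumed standard Mininet usage (not stated in this file): the topology is
# selected by its key in `topos`, e.g.
#   sudo mn --custom topo-custom.py --topo mytopo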
| 19.885714
| 79
| 0.570402
| 105
| 696
| 3.704762
| 0.428571
| 0.226221
| 0.100257
| 0.041131
| 0.061697
| 0.061697
| 0
| 0
| 0
| 0
| 0
| 0.07393
| 0.261494
| 696
| 34
| 80
| 20.470588
| 0.682879
| 0.211207
| 0
| 0
| 0
| 0
| 0.082585
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.058824
| 0
| 0.176471
| 0.058824
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f8a7cc80262619abcc2b85bf1530f105f8f8ce34
| 362
|
py
|
Python
|
agri/urls.py
|
Bhavesh0327/Agriblock
|
72015e1765214b153771dbc3868eae01fe8898b3
|
[
"MIT"
] | 1
|
2020-10-01T08:28:57.000Z
|
2020-10-01T08:28:57.000Z
|
agri/urls.py
|
Bhavesh0327/Agriblock
|
72015e1765214b153771dbc3868eae01fe8898b3
|
[
"MIT"
] | 14
|
2020-06-05T20:37:13.000Z
|
2022-02-26T22:51:36.000Z
|
agri/urls.py
|
Bhavesh0327/Agriblock
|
72015e1765214b153771dbc3868eae01fe8898b3
|
[
"MIT"
] | 3
|
2020-01-29T04:34:28.000Z
|
2020-09-30T21:48:30.000Z
|
from django.urls import path
from .views import *
rest_urls = list(map(lambda x: path(x[0], x[1], name=x[2]), [
('login/', login, 'login'),
('issue_asset/', issue_asset, 'issue_asset'),
('buy/', buy, 'buy'),
('get_assets/', get_assets, 'get_assets'),
('get_transactions/', get_transactions, 'get_transactions')
]))
urlpatterns = rest_urls
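# The map/lambda above is shorthand for listing each route explicitly; the
# first entry, for example, is equivalent to
#   path('login/', login, name='login')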
| 25.857143
| 63
| 0.638122
| 49
| 362
| 4.489796
| 0.469388
| 0.136364
| 0.163636
| 0.181818
| 0.136364
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009868
| 0.160221
| 362
| 13
| 64
| 27.846154
| 0.713816
| 0
| 0
| 0
| 0
| 0
| 0.262431
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f8b864241fa615529ec19943c7bf44bcc5c33cfb
| 4,274
|
py
|
Python
|
solutions/2021/day5/day5.py
|
teije01/adventofcode
|
2742985f8437e9784e7ec5430e3846a755b5d386
|
[
"MIT"
] | null | null | null |
solutions/2021/day5/day5.py
|
teije01/adventofcode
|
2742985f8437e9784e7ec5430e3846a755b5d386
|
[
"MIT"
] | null | null | null |
solutions/2021/day5/day5.py
|
teije01/adventofcode
|
2742985f8437e9784e7ec5430e3846a755b5d386
|
[
"MIT"
] | null | null | null |
"""
--- Day 5: Hydrothermal Venture ---
You come across a field of hydrothermal vents on the ocean floor! These vents constantly produce
large, opaque clouds, so it would be best to avoid them if possible.
They tend to form in lines; the submarine helpfully produces a list of nearby lines of vents (your
puzzle input) for you to review. For example:
0,9 -> 5,9
8,0 -> 0,8
9,4 -> 3,4
2,2 -> 2,1
7,0 -> 7,4
6,4 -> 2,0
0,9 -> 2,9
3,4 -> 1,4
0,0 -> 8,8
5,5 -> 8,2
Each line of vents is given as a line segment in the format x1,y1 -> x2,y2 where x1,y1 are the
coordinates of one end of the line segment and x2,y2 are the coordinates of the other end. These line
segments include the points at both ends. In other words:
An entry like 1,1 -> 1,3 covers points 1,1, 1,2, and 1,3.
An entry like 9,7 -> 7,7 covers points 9,7, 8,7, and 7,7.
For now, only consider horizontal and vertical lines: lines where either x1 = x2 or y1 = y2.
So, the horizontal and vertical lines from the above list would produce the following diagram:
.......1..
..1....1..
..1....1..
.......1..
.112111211
..........
..........
..........
..........
222111....
In this diagram, the top left corner is 0,0 and the bottom right corner is 9,9. Each position is
shown as the number of lines which cover that point or . if no line covers that point. The top-left
pair of 1s, for example, comes from 2,2 -> 2,1; the very bottom row is formed by the overlapping
lines 0,9 -> 5,9 and 0,9 -> 2,9.
To avoid the most dangerous areas, you need to determine the number of points where at least two
lines overlap. In the above example, this is anywhere in the diagram with a 2 or larger - a total
of 5 points.
Consider only horizontal and vertical lines. At how many points do at least two lines overlap?
--- Part Two ---
Unfortunately, considering only horizontal and vertical lines doesn't give you the full picture;
you need to also consider diagonal lines.
Because of the limits of the hydrothermal vent mapping system, the lines in your list will only
ever be horizontal, vertical, or a diagonal line at exactly 45 degrees. In other words:
An entry like 1,1 -> 3,3 covers points 1,1, 2,2, and 3,3.
An entry like 9,7 -> 7,9 covers points 9,7, 8,8, and 7,9.
Considering all lines from the above example would now produce the following diagram:
1.1....11.
.111...2..
..2.1.111.
...1.2.2..
.112313211
...1.2....
..1...1...
.1.....1..
1.......1.
222111....
You still need to determine the number of points where at least two lines overlap. In the above
example, this is still anywhere in the diagram with a 2 or larger - now a total of 12 points.
Consider all of the lines. At how many points do at least two lines overlap?
"""
import numpy as np
class Line:
"""Line representation"""
def __init__(self, x1: int, y1: int, x2: int, y2: int):
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
@classmethod
def from_puzzle_input(cls, line: str):
x1y1, x2y2 = line.split(" -> ")
return cls(*map(int, x1y1.split(",")), *map(int, x2y2.split(",")))
@property
def xmin(self):
return min(self.x1, self.x2)
@property
def xmax(self):
return max(self.x1, self.x2)
@property
def ymin(self):
return min(self.y1, self.y2)
@property
def ymax(self):
return max(self.y1, self.y2)
if __name__ == "__main__":
with open("solutions/2021/day5/input.txt", "r") as f:
lines = [Line.from_puzzle_input(line) for line in f.readlines()]
straight_field = np.zeros((1000, 1000), dtype=int)
diagonal_field = straight_field.copy()
for line in lines:
field_index = (slice(line.ymin, line.ymax + 1), slice(line.xmin, line.xmax + 1))
if line.x1 == line.x2 or line.y1 == line.y2:
straight_field[field_index] += 1
else:
is_identity = (line.x2 - line.x1 > 0) == (line.y2 - line.y1 > 0)
diag_slice = slice(None, None, None if is_identity else -1)
diagonal_field[field_index] += np.diag(np.ones((line.xmax - line.xmin + 1), dtype=int))[diag_slice]
field = straight_field + diagonal_field
print(f"Answer 1: {np.sum(straight_field > 1)}")
print(f"Answer 2: {np.sum(field > 1)}")
| 31.426471
| 111
| 0.653486
| 747
| 4,274
| 3.697456
| 0.281125
| 0.01231
| 0.010862
| 0.008689
| 0.227009
| 0.185373
| 0.144823
| 0.133961
| 0.115858
| 0.091238
| 0
| 0.071001
| 0.218999
| 4,274
| 135
| 112
| 31.659259
| 0.756441
| 0.63781
| 0
| 0.102564
| 0
| 0
| 0.07236
| 0.033246
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.025641
| 0.102564
| 0.333333
| 0.051282
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
f8bec2e6574c370927ccaaf8971ce34b58a52c44
| 497
|
py
|
Python
|
Cap_9/ex9.23/ex9.23.py
|
gguilherme42/Livro-de-Python
|
465a509d50476fd1a87239c71ed741639d58418b
|
[
"MIT"
] | 4
|
2020-04-07T00:38:46.000Z
|
2022-03-10T03:34:42.000Z
|
Cap_9/ex9.23/ex9.23.py
|
gguilherme42/Livro-de-Python
|
465a509d50476fd1a87239c71ed741639d58418b
|
[
"MIT"
] | null | null | null |
Cap_9/ex9.23/ex9.23.py
|
gguilherme42/Livro-de-Python
|
465a509d50476fd1a87239c71ed741639d58418b
|
[
"MIT"
] | 1
|
2021-04-22T02:45:38.000Z
|
2021-04-22T02:45:38.000Z
|
import agenda23
agenda23.le('Agenda.txt')
while True:
opcao = agenda23.menu()
if opcao == 0:
break
elif opcao == 1:
agenda23.novo()
elif opcao == 2:
agenda23.altera()
elif opcao == 3:
agenda23.apaga()
elif opcao == 4:
agenda23.lista()
elif opcao == 5:
agenda23.grava()
elif opcao == 6:
agenda23.le()
elif opcao == 7:
agenda23.ordena()
else:
print('Opção inválida! Digite novamente.')  # "Invalid option! Enter again."
| 20.708333
| 50
| 0.539235
| 56
| 497
| 4.785714
| 0.571429
| 0.235075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084592
| 0.334004
| 497
| 23
| 51
| 21.608696
| 0.725076
| 0
| 0
| 0
| 0
| 0
| 0.086519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.045455
| 0
| 0.045455
| 0.045455
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f8c6b59947b8e1e01fbc267420d89e101ab3f722
| 932
|
py
|
Python
|
util_test.py
|
svennickel/itunes-app-scraper
|
14b857bd40a237825cb6bd93be388e6bcd083c01
|
[
"MIT"
] | 10
|
2020-08-12T06:47:04.000Z
|
2021-12-04T03:06:19.000Z
|
util_test.py
|
svennickel/itunes-app-scraper
|
14b857bd40a237825cb6bd93be388e6bcd083c01
|
[
"MIT"
] | 5
|
2020-11-19T07:53:19.000Z
|
2022-03-16T15:06:37.000Z
|
util_test.py
|
iaine/itunes-app-scraper
|
de60c8c0b369e78d4c87a0cb11284b2ef576c090
|
[
"MIT"
] | 11
|
2020-08-12T06:47:31.000Z
|
2022-03-19T23:36:18.000Z
|
from itunes_app_scraper.util import AppStoreException, AppStoreCollections, AppStoreCategories, AppStoreUtils
import json
import pytest
import os
def test_category_exists():
category = AppStoreCategories()
assert category.BOOKS == 6018
def test_category_does_not_exist():
category = AppStoreCategories()
with pytest.raises(AttributeError, match="'AppStoreCategories' object has no attribute 'METHOD'"):
category.METHOD
def test_collection_exists():
collection = AppStoreCollections()
assert collection.NEW_IOS == 'newapplications'
def test_collection_does_not_exist():
collection = AppStoreCollections()
with pytest.raises(AttributeError, match="'AppStoreCollections' object has no attribute 'NOTHING'"):
collection.NOTHING
def test_app_utils():
utils = AppStoreUtils()
json_object = json.loads(utils.get_entries(AppStoreCollections()))
assert "names" in json_object
| 33.285714
| 109
| 0.769313
| 98
| 932
| 7.112245
| 0.438776
| 0.050215
| 0.043042
| 0.086083
| 0.10043
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005025
| 0.145923
| 932
| 28
| 110
| 33.285714
| 0.870603
| 0
| 0
| 0.181818
| 0
| 0
| 0.137192
| 0.022508
| 0
| 0
| 0
| 0
| 0.136364
| 1
| 0.227273
| false
| 0
| 0.181818
| 0
| 0.409091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f8cddb9ef6bd722c93b8a3657d1eaf9e8803d45f
| 4,525
|
py
|
Python
|
Scripts/Cutter.py
|
rhong3/CPTAC-UCEC
|
ec83fbee234b5ad3df6524cdd960b5f0f3da9ea9
|
[
"MIT"
] | 4
|
2019-01-04T21:11:03.000Z
|
2020-12-11T16:56:15.000Z
|
Scripts/Cutter.py
|
rhong3/CPTAC-UCEC
|
ec83fbee234b5ad3df6524cdd960b5f0f3da9ea9
|
[
"MIT"
] | null | null | null |
Scripts/Cutter.py
|
rhong3/CPTAC-UCEC
|
ec83fbee234b5ad3df6524cdd960b5f0f3da9ea9
|
[
"MIT"
] | null | null | null |
"""
Tile svs/scn files
Created on 11/01/2018
@author: RH
"""
import time
import matplotlib
import os
import shutil
import pandas as pd
matplotlib.use('Agg')
import Slicer
import staintools
import re
# Get all images in the root directory
def image_ids_in(root_dir, mode, ignore=['.DS_Store', 'dict.csv']):
ids = []
for id in os.listdir(root_dir):
if id in ignore:
print('Skipping ID:', id)
else:
if mode == 'CPTAC':
dirname = id.split('_')[-3]
sldnum = id.split('_')[-2].split('-')[-1]
ids.append((id, dirname, sldnum))
if mode == 'TCGA':
dirname = re.split('-01Z|-02Z', id)[0]
sldnum = id.split('-')[5].split('.')[0]
ids.append((id, dirname, sldnum))
return ids
# cut; each level is 2 times difference (20x, 10x, 5x)
def cut():
# load standard image for normalization
std = staintools.read_image("../colorstandard.png")
std = staintools.LuminosityStandardizer.standardize(std)
CPTACpath = '../images/CPTAC/'
TCGApath = '../images/TCGA/'
ref = pd.read_csv('../dummy_His_MUT_joined.csv', header=0)
refls = ref['name'].tolist()
# cut tiles with coordinates in the name (exclude white)
start_time = time.time()
CPTAClist = image_ids_in(CPTACpath, 'CPTAC')
TCGAlist = image_ids_in(TCGApath, 'TCGA')
CPTACpp = pd.DataFrame(CPTAClist, columns=['id', 'dir', 'sld'])
CPTACcc = CPTACpp['dir'].value_counts()
CPTACcc = CPTACcc[CPTACcc > 1].index.tolist()
print(CPTACcc)
TCGApp = pd.DataFrame(TCGAlist, columns=['id', 'dir', 'sld'])
TCGAcc = TCGApp['dir'].value_counts()
TCGAcc = TCGAcc[TCGAcc > 1].index.tolist()
print(TCGAcc)
# CPTAC
for i in CPTAClist:
matchrow = ref.loc[ref['name'] == i[1]]
if matchrow.empty:
continue
try:
os.mkdir("../tiles/{}".format(i[1]))
except(FileExistsError):
pass
for m in range(4):
if m == 0:
tff = 1
level = 0
elif m == 1:
tff = 2
level = 0
elif m == 2:
tff = 1
level = 1
elif m == 3:
tff = 2
level = 1
otdir = "../tiles/{}/level{}".format(i[1], str(m))
try:
os.mkdir(otdir)
except(FileExistsError):
pass
try:
n_x, n_y, raw_img, ct = Slicer.tile(image_file='CPTAC/'+i[0], outdir=otdir,
level=level, std_img=std, dp=i[2], ft=tff)
except(IndexError):
pass
if len(os.listdir(otdir)) < 2:
shutil.rmtree(otdir, ignore_errors=True)
# else:
# print("pass: {}".format(str(i)))
# TCGA
for i in TCGAlist:
matchrow = ref.loc[ref['name'] == i[1]]
if matchrow.empty:
continue
try:
os.mkdir("../tiles/{}".format(i[1]))
except(FileExistsError):
pass
for m in range(4):
if m == 0:
tff = 2
level = 0
elif m == 1:
tff = 1
level = 1
elif m == 2:
tff = 2
level = 1
elif m == 3:
tff = 1
level = 2
otdir = "../tiles/{}/level{}".format(i[1], str(m))
try:
os.mkdir(otdir)
except(FileExistsError):
pass
try:
n_x, n_y, raw_img, ct = Slicer.tile(image_file='TCGA/'+i[0], outdir=otdir,
level=level, std_img=std, dp=i[2], ft=tff)
except Exception as e:
print('Error!', str(e))
pass
if len(os.listdir(otdir)) < 2:
shutil.rmtree(otdir, ignore_errors=True)
print("--- %s seconds ---" % (time.time() - start_time))
subfolders = [f.name for f in os.scandir('../tiles/') if f.is_dir()]
for w in subfolders:
if w not in refls:
print(w)
# # Time measure tool
# start_time = time.time()
# print("--- %s seconds ---" % (time.time() - start_time))
# Run as main
if __name__ == "__main__":
if not os.path.isdir('../tiles'):
os.mkdir('../tiles')
cut()
| 29.769737
| 106
| 0.478232
| 528
| 4,525
| 4.017045
| 0.299242
| 0.022631
| 0.018859
| 0.015559
| 0.392739
| 0.370108
| 0.328147
| 0.299859
| 0.299859
| 0.299859
| 0
| 0.023189
| 0.380552
| 4,525
| 151
| 107
| 29.966887
| 0.7335
| 0.089503
| 0
| 0.516949
| 0
| 0
| 0.074164
| 0.006587
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016949
| false
| 0.050847
| 0.067797
| 0
| 0.09322
| 0.050847
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
f8d0c7ea7f201118a072a6fce98f54b42edb4e97
| 524
|
py
|
Python
|
Lab5/load_graph.py
|
YuryMalyshev/CAD-with-Python
|
ecbb82b8efb436e7089b0895dc898cf956351046
|
[
"MIT"
] | null | null | null |
Lab5/load_graph.py
|
YuryMalyshev/CAD-with-Python
|
ecbb82b8efb436e7089b0895dc898cf956351046
|
[
"MIT"
] | null | null | null |
Lab5/load_graph.py
|
YuryMalyshev/CAD-with-Python
|
ecbb82b8efb436e7089b0895dc898cf956351046
|
[
"MIT"
] | null | null | null |
import numpy as np
def load_graph(path):
edges = np.array([])
with open(path, 'r', encoding='utf-8', errors='ignore') as g_file:
next(g_file) # skip the header line
for line in g_file:
try:
fields = line.split(",")
edges = np.append(edges, [int(fields[0]), int(fields[1]), int(fields[2])], axis=None)
edges = np.reshape(edges, (-1,3))
except Exception as e:
pass
return np.min(edges), np.max(edges), edges
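# A minimal self-contained demo (hypothetical file name and contents, not part
# of the original module): the loader skips the header line and keeps only rows
# whose first three comma-separated fields parse as integers.
if __name__ == "__main__":
    with open("demo_graph.csv", "w", encoding="utf-8") as f:
        f.write("src,dst,weight\n0,1,4\n1,2,7\n")
    lo, hi, edges = load_graph("demo_graph.csv")
    print(lo, hi, edges.shape)  # 0.0 7.0 (2, 3)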
| 32.75
| 101
| 0.532443
| 73
| 524
| 3.767123
| 0.630137
| 0.101818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016854
| 0.320611
| 524
| 15
| 102
| 34.933333
| 0.755618
| 0.038168
| 0
| 0
| 0
| 0
| 0.025896
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.076923
| 0.076923
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
3e063c3a08ca1b49f1f08adcb5b79cf09de3aefe
| 4,128
|
py
|
Python
|
flask_mm/managers/__init__.py
|
szkkteam/flask_mm
|
ea96899a41a0573e51792f1554550c6d77f22a07
|
[
"MIT"
] | 1
|
2021-03-21T18:46:36.000Z
|
2021-03-21T18:46:36.000Z
|
flask_mm/managers/__init__.py
|
szkkteam/flask_mm
|
ea96899a41a0573e51792f1554550c6d77f22a07
|
[
"MIT"
] | null | null | null |
flask_mm/managers/__init__.py
|
szkkteam/flask_mm
|
ea96899a41a0573e51792f1554550c6d77f22a07
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Common Python library imports
import os
# Pip package imports
from six.moves.urllib.parse import urljoin
from flask import url_for, request, abort
from werkzeug import secure_filename, FileStorage, cached_property
# Internal package imports
from flask_mm.utils import UuidNameGen
from flask_mm.files import extension, lower_extension
from flask_mm.storages import BaseStorage
DEFAULT_MANAGER = 'file'
class BaseManager(object):
def __init__(self, app, name, storage, *args, **kwargs):
self.name = name
assert isinstance(storage, BaseStorage), "Storage object must be a subclass of BaseStorage"
self.storage = storage
# Optional parameters
self.allowed_extensions = kwargs.get('extensions', None)
self.namegen = kwargs.get('name_gen', UuidNameGen)
def _clean_url(self, url):
if not url.startswith('http://') and not url.startswith('https://'):
url = ('https://' if request.is_secure else 'http://') + url
if not url.endswith('/'):
url += '/'
return url
def url(self, filename, external=False):
if isinstance(filename, FileStorage):
filename = filename.filename
if filename.startswith('/'):
filename = filename[1:]
if self.storage.has_url:
# TODO: Clean url or not?
return urljoin(self._clean_url(self.storage.base_url), self.storage.path(filename))
else:
return url_for('mm.get_file', mm=self.name, filename=filename, _external=external)
def is_file_allowed(self, filename):
if not self.allowed_extensions:
return True
return (extension(filename) in self.allowed_extensions)
def generate_name(self, filename_or_wfs):
if isinstance(filename_or_wfs, FileStorage):
return self.namegen.generate_name(filename_or_wfs.filename)
return self.namegen.generate_name(filename_or_wfs)
def path(self, filename):
if not hasattr(self.storage, 'path'):
raise RuntimeError("Direct file access is not supported by " + self.storage.__class__.__name__)
return self.storage.path(filename)
def archive_files(self, out_filename, files, *args, **kwargs):
return self.storage.archive_files(out_filename, files, *args, **kwargs)
def exists(self, filename):
return self.storage.exists(filename)
def is_allowed(self, filename):
return self.is_file_allowed(filename)
def read(self, filename):
if not self.exists(filename):
raise FileNotFoundError(filename)
return self.storage.read(filename)
def open(self, filename, mode='r', **kwargs):
if 'r' in mode and not self.storage.exists(filename):
raise FileNotFoundError(filename)
return self.storage.open(filename, mode, **kwargs)
def write(self, filename, content, overwrite=False):
if not overwrite and self.exists(filename):
raise FileExistsError(filename)
return self.storage.write(filename, content)
def delete(self, filename):
return self.storage.delete(filename)
def save(self, file_or_wfs, filename=None, **kwargs):
if not filename and isinstance(file_or_wfs, FileStorage):
filename = lower_extension(secure_filename(file_or_wfs.filename))
if not filename:
raise ValueError('filename is required')
if not self.is_allowed(filename):
raise ValueError('File type is not allowed.')
self.storage.save(file_or_wfs, filename, **kwargs)
return filename
def list_files(self):
return self.storage.list_file()
def metadata(self, filename):
metadata = self.storage.metadata(filename)
metadata['filename'] = os.path.basename(filename)
# TODO: Implement url getter
#metadata['url'] = self.url
def serve(self, filename):
'''Serve a file given its filename'''
if not self.exists(filename):
abort(404)
return self.storage.serve(filename)
| 35.282051
| 107
| 0.66376
| 501
| 4,128
| 5.335329
| 0.249501
| 0.074074
| 0.057239
| 0.046764
| 0.145529
| 0.095024
| 0.077067
| 0.077067
| 0
| 0
| 0
| 0.001581
| 0.233769
| 4,128
| 117
| 108
| 35.282051
| 0.843503
| 0.059835
| 0
| 0.05
| 0
| 0
| 0.054809
| 0
| 0
| 0
| 0
| 0.008547
| 0.0125
| 1
| 0.2125
| false
| 0
| 0.0875
| 0.0625
| 0.5375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3e07225d9f986640eeceeb3fecfcd08a0bbf84a5
| 1,627
|
py
|
Python
|
web/api/user/core.py
|
cclrobotics/ARTBot
|
a0bffabebbc09361bf7748741fe3d30c78af8fbd
|
[
"MIT"
] | 5
|
2020-12-04T19:28:42.000Z
|
2021-12-07T16:14:28.000Z
|
web/api/user/core.py
|
cclrobotics/ARTBot
|
a0bffabebbc09361bf7748741fe3d30c78af8fbd
|
[
"MIT"
] | 50
|
2019-10-08T19:47:24.000Z
|
2021-07-26T05:43:37.000Z
|
web/api/user/core.py
|
cclrobotics/ARTBot
|
a0bffabebbc09361bf7748741fe3d30c78af8fbd
|
[
"MIT"
] | 4
|
2019-10-23T04:14:49.000Z
|
2021-08-01T01:22:37.000Z
|
from functools import partial
from marshmallow import ValidationError
from web.extensions import db
from .validators import validate_user_token
from .serializers import SuperUserSchema
from .exceptions import InvalidUsage
from .user import SuperUser
def validate_and_extract_user_data(json_data, skipped_fields: tuple= (), new_user: bool=False):
try:
data = SuperUserSchema(new_user).load(json_data, partial=skipped_fields)
except ValidationError as err:
raise InvalidUsage.from_validation_error(err)
return data
def create_superuser(email, password, role = SuperUser.default_role()):
s_user = SuperUser.from_email(email, role=role)
s_user.set_password(password)
db.session.commit()
return s_user.id, True
def update_superuser_role(email, new_role):
s_user = SuperUser.get_by_email(email)
old_role = s_user.role
s_user.set_role(new_role)
db.session.commit()
return s_user.email, old_role.value, s_user.role.value
def update_superuser_password(email, new_password, created_at_timestamp):
s_user = SuperUser.get_by_email(email)
validate_user_token(s_user, created_at_timestamp)
s_user.set_password(new_password)
db.session.commit()
return s_user.email, True
def delete_superuser(id, created_at_timestamp):
"""
Delete a user record from the SuperUser table
For added security, must provide exact creation datetime
of the user, in timestamp format
"""
s_user = SuperUser.get_by_id(id)
validate_user_token(s_user, created_at_timestamp)
s_user.delete()
db.session.commit()
return s_user.email, True
| 31.288462
| 95
| 0.761524
| 232
| 1,627
| 5.060345
| 0.314655
| 0.068143
| 0.03833
| 0.07155
| 0.279387
| 0.247871
| 0.247871
| 0.136286
| 0.076661
| 0.076661
| 0
| 0
| 0.161647
| 1,627
| 52
| 96
| 31.288462
| 0.860704
| 0.082975
| 0
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.138889
| false
| 0.111111
| 0.194444
| 0
| 0.472222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
3e08ccba7d47176de06f3bb412445c1550a56baf
| 463
|
py
|
Python
|
jaxfg/core/__init__.py
|
AvanDavad/jaxfg
|
6d1559126ba872b452eca6a13c2688349f1c5f7e
|
[
"MIT"
] | 120
|
2020-11-28T19:43:31.000Z
|
2022-03-29T02:35:46.000Z
|
jaxfg/core/__init__.py
|
AvanDavad/jaxfg
|
6d1559126ba872b452eca6a13c2688349f1c5f7e
|
[
"MIT"
] | 12
|
2021-05-24T09:02:12.000Z
|
2022-03-30T19:51:40.000Z
|
jaxfg/core/__init__.py
|
AvanDavad/jaxfg
|
6d1559126ba872b452eca6a13c2688349f1c5f7e
|
[
"MIT"
] | 9
|
2021-05-06T15:31:23.000Z
|
2022-03-23T12:06:44.000Z
|
from ._factor_base import FactorBase
from ._factor_stack import FactorStack
from ._stacked_factor_graph import StackedFactorGraph
from ._storage_metadata import StorageMetadata
from ._variable_assignments import VariableAssignments
from ._variables import RealVectorVariable, VariableBase
__all__ = [
"FactorStack",
"FactorBase",
"StackedFactorGraph",
"StorageMetadata",
"VariableAssignments",
"RealVectorVariable",
"VariableBase",
]
| 27.235294
| 56
| 0.792657
| 39
| 463
| 9
| 0.512821
| 0.05698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140389
| 463
| 16
| 57
| 28.9375
| 0.88191
| 0
| 0
| 0
| 0
| 0
| 0.222462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
3e22427e89b56fa4293c96f943f7ce0b77c3a1a7
| 2,759
|
py
|
Python
|
source/configuration.py
|
yux1991/PyRHEED
|
b39ad03651c92e3649069919ae48b1e5158cd3dd
|
[
"MIT"
] | 14
|
2019-01-08T14:32:31.000Z
|
2021-11-17T21:07:10.000Z
|
source/configuration.py
|
yux1991/PyRHEED
|
b39ad03651c92e3649069919ae48b1e5158cd3dd
|
[
"MIT"
] | 2
|
2019-05-14T08:56:36.000Z
|
2020-12-22T16:44:30.000Z
|
source/configuration.py
|
yux1991/PyRHEED
|
b39ad03651c92e3649069919ae48b1e5158cd3dd
|
[
"MIT"
] | 4
|
2019-03-12T20:03:54.000Z
|
2022-03-08T14:24:46.000Z
|
import configparser
class Configuration():
DefaultDic = {'windowDefault':{'HS' : 0,\
'VS' : 0,\
'energy' : 20,\
'azimuth' : 0,\
'scaleBarLength' : 5,\
'chiRange' : 60,\
'width' : 0.4,\
'widthSliderScale' : 100,\
'radius' : 5,\
'radiusMaximum' : 20,\
'radiusSliderScale' : 10,\
'tiltAngle' : 0,\
'tiltAngleSliderScale' : 10},\
'propertiesDefault':{'sensitivity': 361.13,\
'electronEnergy': 20,\
'azimuth': 0,\
'scaleBarLength': 5,\
'brightness': 20,\
'brightnessMinimum': 0,\
'brightnessMaximum': 100,\
'blackLevel': 50,\
'blackLevelMinimum': 0,\
'blackLevelMaximum': 500,\
'integralHalfWidth': 0.4,\
'widthMinimum': 0,\
'widthMaximum': 1,\
'widthSliderScale': 100,\
'chiRange': 60,\
'chiRangeMinimum': 0,\
'chiRangeMaximum': 180,\
'radius': 5,\
'radiusMinimum': 0,\
'radiusMaximum': 20,\
'radiusSliderScale': 10,\
'tiltAngle': 0,\
'tiltAngleMinimum': -15,\
'tiltAngleMaximum': 15,\
'tiltAngleSliderScale': 10},\
'canvasDefault':{'widthInAngstrom' : 0.4,\
'radiusMaximum' : 20,\
'span' : 60,\
'tilt' : 0,\
'max_zoom_factor' : 21},\
'chartDefault':{'theme':1}}
def save_defaults(self,Dic = DefaultDic):
config = configparser.ConfigParser()
config.read_dict(Dic)
with open('./configuration.ini','w') as configfile:
config.write(configfile)
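# A minimal usage sketch (not part of the original module): writing the
# defaults produces ./configuration.ini with [windowDefault],
# [propertiesDefault], [canvasDefault] and [chartDefault] sections.
if __name__ == "__main__":
    Configuration().save_defaults()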
| 51.092593
| 66
| 0.303733
| 126
| 2,759
| 6.619048
| 0.595238
| 0.007194
| 0.023981
| 0.057554
| 0.165468
| 0.105516
| 0
| 0
| 0
| 0
| 0
| 0.070081
| 0.596593
| 2,759
| 53
| 67
| 52.056604
| 0.679245
| 0
| 0
| 0.372549
| 0
| 0
| 0.215658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0
| 0.019608
| 0
| 0.078431
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3e38f387d0ad96aa627dd060a7aa1188e154c4a3
| 10,017
|
py
|
Python
|
graphingVisHullTwoD.py
|
cm-1/2D-External-Visual-Hulls
|
579e7d18d048d403b636d326840e5cb2a4e3a3e8
|
[
"MIT"
] | 1
|
2022-02-10T07:07:35.000Z
|
2022-02-10T07:07:35.000Z
|
graphingVisHullTwoD.py
|
cm-1/2D-External-Visual-Hulls
|
579e7d18d048d403b636d326840e5cb2a4e3a3e8
|
[
"MIT"
] | null | null | null |
graphingVisHullTwoD.py
|
cm-1/2D-External-Visual-Hulls
|
579e7d18d048d403b636d326840e5cb2a4e3a3e8
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
from visHullTwoD import Scene, SegmentType
#%%
def doubleFaceTest(f):
doubleFace = False
origHE = f.halfEdge
he = f.halfEdge.next
while he != origHE:
if f.index != he.leftFace.index:
doubleFace = True
break
he = he.next
if doubleFace:
print("Double face ({0}):".format(f.index))
origHE = f.halfEdge
he = f.halfEdge.next
while he != origHE:
fIndex = he.leftFace.index
v0 = he.prev.headVertex.position
v1 = he.headVertex.position
print(" - F{0}, {1}->{2}".format(fIndex, v0, v1))
he = he.next
v0 = he.prev.headVertex.position
v1 = he.headVertex.position
print(" - F{0}, {1}->{2}".format(fIndex, v0, v1))
print("-----")
def checkEventEquality(w0, w1):
print("== Event check ==")
numEvents0 = len(w0.eventsRecord)
numEvents1 = len(w1.eventsRecord)
if numEvents0 != numEvents1:
print("NUMBER OF EVENT RECORDS DIFFERENT! w0: {0}, w1: {1}".format(numEvents0, numEvents1))
minEvents = min(numEvents0, numEvents1)
for i in range(minEvents):
eventsEq = w0.eventsRecord[i].debugEq(w1.eventsRecord[i])
if not np.all(list(eventsEq.values())):
print(" - DIFF AT {0}: {1}".format(i, eventsEq))
print("Done event check!\n")
#%%
def drawScene(scene):
print("cwList:", scene.cwList)
# Plot all polygons.
for obj in scene.polygons:
x,y = obj.getSeparateXYs()
plt.fill(x,y, "#A0A0A0") # light grey fill
plt.plot(x,y, "#505050") # dark grey edges/outline
'''
for ln in scene.lines:
p0, p1 = scene.sceneBorderHitPoints(ln)
plt.plot([p0[0], p1[0]], [p0[1], p1[1]], "k--")
'''
for ln in scene.activeSegments:
colString = "g"
if ln.activeType == SegmentType.A:
colString = "r"
elif ln.activeType == SegmentType.B:
colString = "b"
# Magenta if vn increase to right (for vert lines) or down
# Cyan otherwise
colString2 = "c"
if ln.isVertical:
if (ln.p1[1] > ln.p0[1] and ln.increasesToTheRight) or (ln.p1[1] < ln.p0[1] and not ln.increasesToTheRight):
colString2 = "m"
else:
if (ln.p1[0] > ln.p0[0] and ln.increasesToTheRight) or (ln.p1[0] < ln.p0[0] and not ln.increasesToTheRight):
colString2 = "m"
plt.plot([ln.p0[0], ln.p1[0]], [ln.p0[1], ln.p1[1]], colString2)
'''for halfEdge in partitionMesh.halfEdges:
if halfEdge.headVertex is not None and halfEdge.pair.headVertex is not None:
v0 = halfEdge.headVertex.position
v1 = halfEdge.pair.headVertex.position
plt.plot([v0[0], v1[0]], [v0[1], v1[1]], "r--")
else:
print("Some problem")'''
colours = ["k", "r", "g", "b", "y"]
for f in scene.drawableFaces:
#print("Visual number:", f.visualNumber)
regionColour = colours[min(f.visualNumber, len(colours) - 1)]
pts = f.getCoords()
xs = pts[:, 0]
ys = pts[:, 1]
plt.fill(xs, ys, regionColour)
convex = []
concave = []
for i in range(scene.vertices.shape[0]):
if scene.isVertexConcave(i):
concave.append(scene.vertices[i])
else:
convex.append(scene.vertices[i])
npConvex = np.array(convex)
npConcave = np.array(concave)
'''
for maxSeg in self.activeSegments:
for succSeg in self.activeSegments:
succInt = maxSeg.intersection(succSeg)
onFirstSegment = succInt.meetS > -EQUAL_THRESHOLD and succInt.meetS < maxSeg.length + EQUAL_THRESHOLD
onSecondSegment = succInt.meetT > -EQUAL_THRESHOLD and succInt.meetT < succSeg.length + EQUAL_THRESHOLD
if succInt.doMeet and onFirstSegment and onSecondSegment:
plt.plot([succInt.meetPt[0]], [succInt.meetPt[1]], 'ko')
'''
'''if npConvex.shape[0] > 0:
plt.plot(npConvex[:, 0], npConvex[:, 1], 'bo')
if npConcave.shape[0] > 0:
plt.plot(npConcave[:, 0], npConcave[:, 1], 'go')'''
plt.show()
world0 = Scene()
world1 = Scene()
world2 = Scene()
world3 = Scene()
world4 = Scene()
world5 = Scene()
world6 = Scene()
world7 = Scene()
world8 = Scene()
world9 = Scene()
world10 = Scene()
world11 = Scene()
world12 = Scene()
# These are the tris from Petitjean's diagram
polygon1 = [(0, 0), (2.25, 0.5), (1.25, 2.3)] # [(0,3),(1,1),(3,0),(4,0),(3,4)]
polygon2 = [(1.15, 3.15), (4, 4), (0.9, 5.25)] # [(1,4),(2,5),(2,1),(1,3)]
polygon3 = [(3, 0.7), (4.85, 1.75), (4.85, 3.4)]
world0.addPolygon(polygon1)
world0.addPolygon(polygon2)
world0.addPolygon(polygon3)
#world0.addPolygon(polygon4)
polygon1 = [(0, 0), (5, 0), (5, 5), (4, 5), (4, 3), (1, 3), (1, 5), (0, 5)]
world1.addPolygon(polygon1)
polygon1 = [(0, 0), (5, 0), (5, 3), (4, 3), (4, 5), (1, 5), (1, 3), (0, 3)]
polygon2 = [(1, 7), (3, 7), (5, 9), (4, 11), (4, 9), (1, 8), (2, 10), (0, 10)]
world2.addPolygon(polygon1)
world2.addPolygon(polygon2)
polygon1 = [(0, 2), (1,1), (2,2), (1,0)]
polygon2 = [(3,3), (4,2), (5,3)]
# polygon2 = [(p[0] - 3, p[1]) for p in polygon2]
# Horizontal flip for testing purposes.
polygon1 = [(-p[0], p[1]) for p in polygon1]
polygon2 = [(-p[0], p[1]) for p in polygon2]
world3.addPolygon(polygon1)
world3.addPolygon(polygon2)
polygon1 = [(0, 7), (2.25, 5), (1.25, 4), (5, 5)] # [(0, 0), (2.25, 0.5), (1.25, 2.3)] # [(0,3),(1,1),(3,0),(4,0),(3,4)]
polygon2 = [(1.15, -3.15), (4, -4), (2, -7), (0.9, -5.25)] #[(1.15, 3.15), (4, 4), (0.9, 5.25)] # [(1,4),(2,5),(2,1),(1,3)]
polygon3 = [(3, 1), (3, 0.0), (4.85, 0.75), (4.85, 2.4), (5,4)] #[(3, 0.7), (4.85, 1.75), (4.85, 3.4)]
polygon4 = [(-0.5, -1), (-0.5, 1.0), (0.5, 1), (0.5, -1)] #[(3, 0.7), (4.85, 1.75), (4.85, 3.4)]
world4.addPolygon(polygon1)
world4.addPolygon(polygon2)
world4.addPolygon(polygon3)
world4.addPolygon(polygon4)
polygon1 = [(0, 0.6), (1.5, 0), (2.5, 1.25), (1.25, 0.75), (1.125, 1.8)]
polygon2 = [(1.3, 2.25), (2.8, 2.8), (1.65, 3.125)]
polygon3 = [(2.8, 1.25), (4.125, 0.25), (3.5, 2.0)]
world5.addPolygon(polygon1)
world5.addPolygon(polygon2)
world5.addPolygon(polygon3)
polygon1 = [(0,0), (2.5, 0), (0, 1.5)]
polygon2 = [(0, 3.25), (5, 4.25), (0, 4.25)]
polygon3 = [(3.5, 0), (5, 0), (5, 2.75), (3.5, 2.75)]
world6.addPolygon(polygon1)
world6.addPolygon(polygon2)
world6.addPolygon(polygon3)
polygon1 = [(-1, 1), (-2, 1), (-2, -1), (-1, -1), (0, 0), (1, -1), (2, -1), (2, 1), (1, 1), (0, 2)]
world7.addPolygon(polygon1)
polygon1 = [(-1, 1), (-2, 1), (-2, -1), (-1, -1)]
polygon2 = [(-1, -1), (0, 0), (1, -1), (1, 1), (0, 2), (-1, 1)]
polygon3 = [(1, -1), (2, -1), (2, 1), (1, 1)]
# polygon1 = [(p[0], 0.9*p[1]) for p in polygon1]
# polygon3 = [(p[0], 0.9*p[1]) for p in polygon3]
world8.addPolygon(polygon1)
world8.addPolygon(polygon2)
world8.addPolygon(polygon3)
# 0.9999995231628418
polygon1 = [(-1, -1), (1, -1), (1, 1), (-1, 1)]
polygon2 = [(1, 1), (2, -1), (3, 0), (2, 1)]
world9.addPolygon(polygon1)
world9.addPolygon(polygon2)
polygon1 = [(0.734870970249176, 0.26040399074554443), (-0.045375000685453415, 0.8651400208473206), (-0.8234530091285706, 0.4177840054035187), (-0.14182999730110168, 0.21450699865818024)]
polygon2 = [(-1.0, 1.0108875036239624), (1.0, 1.010890007019043), (1.0, 1.3735400438308716), (-1.0, 1.373543620109558)]
world10.addPolygon(polygon2)
world10.addPolygon(polygon1)
polygon0 = [(0.734870970249176, -1.1526894569396973), (-0.045375000685453415, 1.1651400327682495), (-0.8234530091285706, -0.9953095316886902), (-0.14182999730110168, -1.1985864639282227)]
polygon1 = [(2.1045942306518555, -2.0704498291015625), (2.1045916080474854, 1.9576737880706787), (1.7419415712356567, 1.9576740264892578), (1.7419381141662598, -2.0704498291015625)]
polygon2 = [(-1.7419382333755493, -2.0704498291015625), (-1.741940975189209, 1.9576740264892578), (-2.10459041595459, 1.9576740264892578), (-2.1045944690704346, -2.0704495906829834)]
world11.addPolygon(polygon0)
world11.addPolygon(polygon1)
world11.addPolygon(polygon2)
polygon0 = [(0.7000000476837158, -1.2000000476837158), (-0.10000000149011612, 1.2000000476837158), (-0.800000011920929, -1.0), (-0.10000000149011612, -1.25)]
polygon1 = [(2.0999999046325684, -2.0999999046325684), (2.0999999046325684, 1.899999976158142), (1.7000000476837158, 1.899999976158142), (1.7000000476837158, -2.0999999046325684)]
polygon2 = [(-1.7000000476837158, -2.0999999046325684), (-1.7000000476837158, 1.899999976158142), (-2.1000001430511475, 1.899999976158142), (-2.1000001430511475, -2.0999999046325684)]
world12.addPolygon(polygon0)
world12.addPolygon(polygon1)
world12.addPolygon(polygon2)
#world.addLine((0, 2.5), (3, 2.5))
worlds = [world0, world1, world2, world3, world4, world5, world6, world7, world8, world9, world10]
worldIndex = 0
for w in worlds:
print("\nWorld:", worldIndex)
worldIndex += 1
w.calcFreeLines()
drawScene(w)
faceList = w.partitionMesh.faces
for k in faceList:
doubleFaceTest(faceList[k])
checkEventEquality(world12, world11)
#%%
reminders = [
"Is there a better way, using cos(), to handle parallelism in isLineInsideEdgeAngle()?",
"Pruning of lines that intersect obj at CONTACT verts. (I sort of forget what this self-reminder meant...)",
"Pruning of segments outside convex hull.",
"Right now, swapDir() side effect in findIntersections(). Should this be changed?",
"Just generally take a second look at how floating-point precision problems are handled.\nEspecially for the y-intercept of MyLine, since a very small difference in coordinates can lead to a larger difference in y-intercepts.\nSo instead of comparing y-intercepts, something else should maybe be compared!"
]
for reminder in reminders:
sep = "==========="
print("\n" + sep + "\n" + reminder + "\n" + sep + "\n")
| 38.526923
| 311
| 0.605471
| 1,388
| 10,017
| 4.366715
| 0.223343
| 0.009899
| 0.00594
| 0.00396
| 0.143046
| 0.127702
| 0.091569
| 0.07837
| 0.07705
| 0.066161
| 0
| 0.211822
| 0.201158
| 10,017
| 260
| 312
| 38.526923
| 0.545614
| 0.074174
| 0
| 0.102857
| 0
| 0.011429
| 0.103465
| 0.00297
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017143
| false
| 0
| 0.017143
| 0
| 0.034286
| 0.062857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3e6ad0d35aefd868861d6a14144cf80665b8e7ea
| 274
|
py
|
Python
|
setup.py
|
dalejung/earthdragon
|
6fc9308288361bbe54d1d0107b4a77e3f27cd9be
|
[
"MIT"
] | 1
|
2019-12-02T15:10:49.000Z
|
2019-12-02T15:10:49.000Z
|
setup.py
|
dalejung/earthdragon
|
6fc9308288361bbe54d1d0107b4a77e3f27cd9be
|
[
"MIT"
] | 5
|
2015-08-13T16:00:04.000Z
|
2016-03-14T18:43:11.000Z
|
setup.py
|
dalejung/earthdragon
|
6fc9308288361bbe54d1d0107b4a77e3f27cd9be
|
[
"MIT"
] | null | null | null |
from distutils.core import setup
DISTNAME='earthdragon'
FULLVERSION='0.1'
setup(
name=DISTNAME,
version=FULLVERSION,
packages=['earthdragon'],
install_requires = [
'asttools',
'toolz',
'typeguard',
'more_itertools',
]
)
| 16.117647
| 32
| 0.605839
| 24
| 274
| 6.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00995
| 0.266423
| 274
| 16
| 33
| 17.125
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0.222628
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3e730fa82d3520ad13dc948a854e1cd1df0331d4
| 275
|
py
|
Python
|
setup.py
|
grro/install-raspberry
|
f6db2d451c1277127a77fdc6b00ea55708f0bd17
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
grro/install-raspberry
|
f6db2d451c1277127a77fdc6b00ea55708f0bd17
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
grro/install-raspberry
|
f6db2d451c1277127a77fdc6b00ea55708f0bd17
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup
setup(
name='install-raspberry',
version='',
packages=[''],
url='https://github.com/grro/httpstreamproxy',
license='Apache Software License',
author='grro',
author_email='[email protected]',
description='test'
)
| 21.153846
| 50
| 0.658182
| 30
| 275
| 6
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178182
| 275
| 12
| 51
| 22.916667
| 0.79646
| 0
| 0
| 0
| 0
| 0
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3e73f6fed18b11f2933d0b20530ca1d6b4de649e
| 2,701
|
py
|
Python
|
py_privatekonomi/tests/swedbank/test_swedbank_db.py
|
nilsFK/py-privatekonomi
|
9172dfa85e439e18558a60fdb3b69e956e70e783
|
[
"MIT"
] | 2
|
2015-01-04T21:27:45.000Z
|
2015-01-05T13:31:52.000Z
|
py_privatekonomi/tests/swedbank/test_swedbank_db.py
|
nilsFK/py-privatekonomi
|
9172dfa85e439e18558a60fdb3b69e956e70e783
|
[
"MIT"
] | 28
|
2015-01-04T22:13:24.000Z
|
2019-11-29T13:41:01.000Z
|
py_privatekonomi/tests/swedbank/test_swedbank_db.py
|
nilsFK/py-privatekonomi
|
9172dfa85e439e18558a60fdb3b69e956e70e783
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import inspect
from py_privatekonomi.utilities import common
from py_privatekonomi.tests.test_base import TestBase
from py_privatekonomi.tests.dataset.swedbank.sample1 import test_data as test_data_1
from py_privatekonomi.tests.dataset.swedbank.sample2 import test_data as test_data_2
from py_privatekonomi.tests.dataset.swedbank.sample3 import test_data as test_data_3
from py_privatekonomi.tests.dataset.swedbank.sample5 import test_data as test_data_5
class TestSwedbankDB(TestBase):
def setUp(self):
pass
def test_sample1_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample1',
'swedbank',
'swedbank',
persist=True,
config=self.get_default_config())
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_1, format_as_mapper=True)
self.assertPersisted(test_data_1)
def test_sample2_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample2',
'swedbank',
'swedbank',
persist=True,
config=self.get_default_config())
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_2, format_as_mapper=True)
self.assertPersisted(test_data_2)
def test_sample3_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample3',
'swedbank',
'swedbank',
config=self.get_default_config(),
persist=True)
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_3, format_as_mapper=True)
self.assertPersisted(test_data_3)
def test_sample5_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample5',
'swedbank',
'swedbank',
config=self.get_default_config(),
persist=True)
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_5, format_as_mapper=True)
self.assertPersisted(test_data_5)
if __name__ == '__main__':
unittest.main()
| 37
| 84
| 0.652721
| 312
| 2,701
| 5.375
| 0.217949
| 0.076327
| 0.067979
| 0.071556
| 0.751342
| 0.751342
| 0.601073
| 0.601073
| 0.493739
| 0.493739
| 0
| 0.018191
| 0.246946
| 2,701
| 73
| 85
| 37
| 0.806293
| 0.01555
| 0
| 0.5
| 0
| 0
| 0.129421
| 0.088789
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.078125
| false
| 0.015625
| 0.171875
| 0
| 0.265625
| 0.078125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3e78c123f36641a6b522ac2d459248b01e28de60
| 1,204
|
py
|
Python
|
hello/hello_pil.py
|
East196/hello-py
|
a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21
|
[
"Apache-2.0"
] | 1
|
2017-10-23T14:58:47.000Z
|
2017-10-23T14:58:47.000Z
|
hello/hello_pil.py
|
East196/hello-py
|
a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21
|
[
"Apache-2.0"
] | null | null | null |
hello/hello_pil.py
|
East196/hello-py
|
a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21
|
[
"Apache-2.0"
] | 1
|
2018-04-06T07:49:18.000Z
|
2018-04-06T07:49:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from PIL import Image, ImageDraw, ImageFont, ImageFilter
import random
im = Image.open('F:/workspace/python/data/backpink.jpg')
im2 = im.filter(ImageFilter.BLUR)
im2.save('F:/workspace/python/data/backpink_blur.png', 'png')
im2.save('F:/workspace/python/data/backpink_blur.jpg', 'jpeg')
# Random letter:
def random_char():
return chr(random.randint(65, 90))
# Random color 1:
def random_color():
return random.randint(64, 255), random.randint(64, 255), random.randint(64, 255)
# Random color 2:
def random_color2():
return random.randint(32, 127), random.randint(32, 127), random.randint(32, 127)
# 240 x 60:
width = 60 * 4
height = 60
image = Image.new('RGB', (width, height), (255, 255, 255))
# Create a Font object:
font = ImageFont.truetype('C:/Windows/Fonts/Arial.ttf', 36)
# Create a Draw object:
draw = ImageDraw.Draw(image)
# Fill every pixel:
for x in range(width):
for y in range(height):
draw.point((x, y), fill=random_color())
# Draw the text:
for t in range(4):
draw.text((60 * t + 10, 10), random_char(), font=font, fill=random_color2())
# Blur:
image = image.filter(ImageFilter.BLUR)
image.save('code.jpg', 'jpeg')
print((image.format, image.size, image.mode))
# image.show()
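# The result: code.jpg is a blurred 240x60 image with four random uppercase letters drawn over per-pixel colored noise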
| 24.571429
| 84
| 0.680233
| 183
| 1,204
| 4.431694
| 0.442623
| 0.112207
| 0.059186
| 0.073983
| 0.263872
| 0.229346
| 0.229346
| 0.229346
| 0
| 0
| 0
| 0.067372
| 0.137043
| 1,204
| 48
| 85
| 25.083333
| 0.713186
| 0.102159
| 0
| 0
| 0
| 0
| 0.158092
| 0.137512
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0.08
| 0.12
| 0.32
| 0.04
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
3e8b7eee7855784a75f5858aea2cd7099da89f3d
| 4,197
|
py
|
Python
|
gistsig/cli.py
|
derekmerck/check-hashes
|
aaa7d596281e41bbb5b73850c5d43113b7d0632b
|
[
"MIT"
] | 1
|
2019-01-26T22:33:02.000Z
|
2019-01-26T22:33:02.000Z
|
gistsig/cli.py
|
derekmerck/check-hashes
|
aaa7d596281e41bbb5b73850c5d43113b7d0632b
|
[
"MIT"
] | null | null | null |
gistsig/cli.py
|
derekmerck/check-hashes
|
aaa7d596281e41bbb5b73850c5d43113b7d0632b
|
[
"MIT"
] | null | null | null |
"""
gistsig
Derek Merck
Winter 2019
Sign and verify Python packages using public gists.
"""
import logging
from pprint import pformat
from datetime import datetime
import click
from . import get_gist, update_gist
from . import get_pkg_info, get_pkg_gist
@click.group()
@click.option('--verbose', '-v', is_flag=True, default=False)
@click.option('--gist_id', '-g', help="Public gist id with reference signatures.")
@click.option('--gist_oauth_tok', '-o', help="Github token (only if pushing new signatures)")
@click.pass_context
def cli(ctx, verbose, gist_id, gist_oauth_tok):
"""
Perform a simple public signature lookup to verify local Python package
files.
\b
Example:
$ gistsig -g 4b0bfbca0a415655d97f36489629e1cc show diana
Local package has signature python-diana:2.0.13:9fec66ac3f4f87f8b933c853d8d5f49bdae0c1dc
"""
ctx.obj['gist_id'] = gist_id
ctx.obj['gist_oauth_tok'] = gist_oauth_tok
if verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.ERROR)
pass
@cli.command()
@click.argument('packages', nargs=-1)
def show(packages):
"""Compute local package signature."""
for pkg_name in packages:
key, value = get_pkg_info(pkg_name)
msg = click.style("Local package has signature {}:{}.".format(key, value), fg='yellow')
click.echo(msg)
def find_gist_id(pkg_name):
# Check the package
gist_id = get_pkg_gist(pkg_name)
if gist_id and \
click.confirm("No reference gist set, use package declared gist? ({})".format(gist_id)):
return gist_id
click.echo("No gist found for this package")
return None
@click.command()
@click.argument("packages", nargs=-1)
@click.pass_context
def pull(ctx, packages):
"""Show public package signatures."""
gist_id = ctx.obj['gist_id']
for pkg_name in packages:
if not gist_id:
gist_id = find_gist_id(pkg_name)
pkg_sigs = get_gist(gist_id=gist_id, name=pkg_name)
msg = click.style("Reference package has signatures:", fg='yellow')
click.echo(msg)
click.echo(pformat(pkg_sigs))
@click.command()
@click.argument("packages", nargs=-1)
@click.pass_context
def verify(ctx, packages):
"""Compare local to public package signatures."""
exit_code = 0
gist_id = ctx.obj['gist_id']
for pkg_name in packages:
key, value = get_pkg_info(pkg_name)
if not gist_id:
gist_id = find_gist_id(pkg_name)
pkg_sigs = get_gist(gist_id=gist_id, name=pkg_name)
ref = None
if pkg_sigs:
entry = pkg_sigs.get(key)
if entry:
ref = entry.get('hash')
if value != ref:
msg = click.style("Package signature {}:{} is not valid.".format(key, value), fg='red')
click.echo(msg)
exit_code = 1
else:
msg = click.style("Package signature {}:{} is valid.".format(key, value), fg="green")
click.echo(msg)
exit(exit_code)
@click.command()
@click.argument("packages", nargs=-1)
@click.pass_context
def push(ctx, packages):
"""Update public package signatures"""
gist_id = ctx.obj['gist_id']
gist_oauth_tok = ctx.obj['gist_oauth_tok']
if not gist_oauth_tok:
click.echo("Need a gist oauth token to push data. Set with envvar or on the cli.")
exit(1)
for pkg_name in packages:
if not gist_id:
gist_id = find_gist_id(pkg_name)
pkg_sigs = get_gist(gist_id=gist_id, name=pkg_name)
logging.debug("Found pkg keys:")
logging.debug(pformat(pkg_sigs))
key, value = get_pkg_info(pkg_name)
click.echo("Submitting signature {}:{}".format(key, value))
pkg_sigs[key] = { "hash": value,
"time": datetime.now().isoformat() }
update_gist(oauth_tok=gist_oauth_tok, gist_id=gist_id,
name=pkg_name, content=pkg_sigs)
def _cli():
cli.add_command(show)
cli.add_command(pull)
cli.add_command(verify)
cli.add_command(push)
cli(auto_envvar_prefix="GISTSIG", obj={})
if __name__ == "__main__":
_cli()
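# get_pkg_info and get_pkg_gist are imported from the package and are not defined in this
# file. Purely as an illustration of what a name:version:hash signature like the one in the
# docstring could look like, here is a minimal hypothetical sketch that hashes a package's
# .py files with hashlib. It is an assumption for illustration only, not gistsig's actual
# implementation.
import hashlib
import importlib
import os
def hypothetical_pkg_signature(pkg_name):
    mod = importlib.import_module(pkg_name)
    pkg_dir = os.path.dirname(mod.__file__)
    digest = hashlib.sha1()
    for root, dirs, files in os.walk(pkg_dir):
        dirs.sort()  # deterministic traversal order
        for fname in sorted(files):
            if fname.endswith(".py"):
                with open(os.path.join(root, fname), "rb") as fh:
                    digest.update(fh.read())
    version = getattr(mod, "__version__", "unknown")
    return "{}:{}".format(pkg_name, version), digest.hexdigest()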
| 27.794702
| 99
| 0.646414
| 581
| 4,197
| 4.462995
| 0.232358
| 0.076359
| 0.038565
| 0.037023
| 0.387968
| 0.311994
| 0.25646
| 0.237948
| 0.237948
| 0.213266
| 0
| 0.017044
| 0.231117
| 4,197
| 150
| 100
| 27.98
| 0.786489
| 0.114844
| 0
| 0.357143
| 0
| 0
| 0.162147
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.05102
| 0.061224
| 0
| 0.153061
| 0.010204
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e41482448ad0c9a9ce2ec0102c5edc24cd4e69ff
| 11,339
|
py
|
Python
|
tests/test_views/test_memberships.py
|
freelancing-solutions/GCP-Based-Database-as-a-Service
|
7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3
|
[
"MIT"
] | 1
|
2021-04-15T19:45:04.000Z
|
2021-04-15T19:45:04.000Z
|
tests/test_views/test_memberships.py
|
freelancing-solutions/pinydesk
|
7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3
|
[
"MIT"
] | 516
|
2021-05-02T11:46:36.000Z
|
2022-03-29T06:09:49.000Z
|
tests/test_views/test_memberships.py
|
freelancing-solutions/pinydesk
|
7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3
|
[
"MIT"
] | 1
|
2021-09-04T22:40:14.000Z
|
2021-09-04T22:40:14.000Z
|
import random
import typing
from datetime import datetime, timedelta
from random import randint
from google.cloud import ndb
from data_service.config.stocks import currency_symbols
from data_service.store.mixins import AmountMixin
from data_service.views.memberships import MembershipsView
from data_service.store.memberships import Memberships, MembershipPlans
from data_service.utils.utils import create_id
from .. import test_app
# noinspection PyUnresolvedReferences
from pytest import raises
# noinspection PyUnresolvedReferences
from pytest_mock import mocker
class MembershipsQueryMock:
membership_instance: Memberships = Memberships()
results_range: int = randint(0, 100)
def __init__(self):
self.membership_instance.plan_id = create_id()
self.membership_instance.status = "paid"
self.membership_instance.date_created = datetime.now()
self.membership_instance.plan_start_date = datetime.now().date()
self.membership_instance.payment_method = 'paypal'
def fetch(self) -> typing.List[Memberships]:
return [self.membership_instance for _ in range(self.results_range)]
def get(self) -> Memberships:
return self.membership_instance
@ndb.tasklet
def get_async(self):
return self.membership_instance
class MembershipPlansQueryMock:
membership_plan_instance: MembershipPlans = MembershipPlans()
results_range: int = randint(0, 100)
def __init__(self):
self.membership_plan_instance.date_created = datetime.now()
self.membership_plan_instance.plan_name = "bronze"
self.membership_plan_instance.description = "bronze plan"
self.membership_plan_instance.total_members = 10
self.membership_plan_instance.schedule_day = 1
self.membership_plan_instance.schedule_term = "monthly"
self.membership_plan_instance.term_payment_amount = AmountMixin(amount=100,
currency=random.choice(currency_symbols()))
self.membership_plan_instance.registration_amount = AmountMixin(amount=100,
currency=random.choice(currency_symbols()))
def fetch(self) -> typing.List[MembershipPlans]:
return [self.membership_plan_instance for _ in range(self.results_range)]
def get(self) -> MembershipPlans:
return self.membership_plan_instance
membership_mock_data: dict = {
"uid": create_id(),
"plan_id": create_id(),
"status": "unpaid",
"date_created": datetime.now(),
"plan_start_date": datetime.date(datetime.now() + timedelta(days=5))
}
# noinspection PyShadowingNames
def test_create_membership(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid = membership_mock_data['uid']
plan_id = membership_mock_data['plan_id']
plan_start_date = membership_mock_data['plan_start_date']
response, status = membership_view_instance.add_membership(uid=uid, plan_id=plan_id,
plan_start_date=plan_start_date)
response_data: dict = response.get_json()
assert status == 500, response_data['message']
mocker.patch('data_service.store.users.UserValidators.is_user_valid', return_value=True)
mocker.patch('data_service.store.memberships.PlanValidators.plan_exist', return_value=False)
mocker.patch('data_service.store.memberships.MembershipValidators.start_date_valid', return_value=True)
# mocker.patch('data_service.views.memberships.Validators.can_add_member', return_value=True)
response, status = membership_view_instance.add_membership(uid=uid, plan_id=plan_id,
plan_start_date=plan_start_date)
response_data: dict = response.get_json()
assert status == 200, response_data['message']
mocker.stopall()
# noinspection PyShadowingNames
def test_update_membership(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid = membership_mock_data['uid']
plan_id = membership_mock_data['plan_id']
plan_start_date = membership_mock_data['plan_start_date']
mocker.patch('data_service.store.users.UserValidators.is_user_valid', return_value=True)
mocker.patch('data_service.store.memberships.PlanValidators.plan_exist', return_value=False)
mocker.patch('data_service.store.memberships.MembershipValidators.start_date_valid', return_value=True)
response, status = membership_view_instance.update_membership(uid=uid, plan_id=plan_id,
plan_start_date=plan_start_date)
assert status == 200, "Unable to update membership"
response_data: dict = response.get_json()
assert response_data.get('message') is not None, "message was not set properly"
assert response_data.get('payload') is not None, response_data['message']
mocker.stopall()
# noinspection PyShadowingNames
def test_set_membership_status(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid = membership_mock_data['uid']
status = membership_mock_data['status']
response, status = membership_view_instance.set_membership_status(uid=uid, status=status)
assert status == 200, "Unable to set membership status"
response, status = membership_view_instance.set_membership_status(uid=uid, status="paid")
assert status == 200, "Unable to set membership status"
mocker.stopall()
# noinspection PyShadowingNames
def test_change_membership(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
membership_query_mock_instance = MembershipsQueryMock()
membership_query_mock_instance.membership_instance.plan_id = membership_mock_data['plan_id']
mocker.patch('google.cloud.ndb.Model.query', return_value=membership_query_mock_instance)
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid: str = membership_mock_data['uid']
plan_id: str = membership_mock_data['plan_id']
dest_plan_id: str = create_id()
mocker.patch('data_service.views.memberships.MembershipsView.plan_exist', return_value=True)
response, status = membership_view_instance.change_membership(uid=uid, origin_plan_id=plan_id,
dest_plan_id=dest_plan_id)
assert status == 200, "Unable to change membership"
mocker.stopall()
# noinspection PyShadowingNames
def test_send_welcome_email(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid: str = membership_mock_data['uid']
plan_id: str = membership_mock_data['plan_id']
response, status = membership_view_instance.send_welcome_email(uid=uid, plan_id=plan_id)
assert status == 200, "unable to send welcome email"
mocker.stopall()
# noinspection PyShadowingNames
def test_plan_members_payment_status(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid: str = membership_mock_data['uid']
plan_id: str = membership_mock_data['plan_id']
status: str = membership_mock_data['status']
response, status = membership_view_instance.return_plan_members_by_payment_status(plan_id=plan_id,
status=status)
assert status == 200, "unable to fetch plan members by status"
mocker.stopall()
# noinspection PyShadowingNames
def test_return_plan_members(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
plan_id: str = membership_mock_data['plan_id']
response, status = membership_view_instance.return_plan_members(plan_id=plan_id)
assert status == 200, "unable to fetch plan members"
mocker.stopall()
# noinspection PyShadowingNames
def test_is_member_off(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid: str = membership_mock_data['uid']
response, status = membership_view_instance.is_member_off(uid=uid)
assert status == 200, "unable to test membership status"
mocker.stopall()
# noinspection PyShadowingNames
def test_payment_amount(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid: str = membership_mock_data['uid']
mocker.patch('data_service.views.memberships.MembershipPlansView.get_plan',
return_value=MembershipPlansQueryMock().get())
response, status = membership_view_instance.payment_amount(uid=uid)
response_data: dict = response.get_json()
assert status == 200, response_data['message']
mocker.stopall()
# noinspection PyShadowingNames
def test_set_payment_status(mocker):
mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
with test_app().app_context():
membership_view_instance: MembershipsView = MembershipsView()
uid: str = membership_mock_data['uid']
status: str = "paid"
response, status = membership_view_instance.set_payment_status(uid=uid, status=status)
assert status == 200, "Unable to set payment status"
status: str = "unpaid"
response, status = membership_view_instance.set_payment_status(uid=uid, status=status)
assert status == 200, "Unable to set payment status"
mocker.stopall()
| 45.175299
| 115
| 0.71038
| 1,310
| 11,339
| 5.860305
| 0.101527
| 0.025791
| 0.065911
| 0.057314
| 0.756155
| 0.711606
| 0.668881
| 0.636577
| 0.587469
| 0.552299
| 0
| 0.006232
| 0.193315
| 11,339
| 250
| 116
| 45.356
| 0.83306
| 0.040833
| 0
| 0.527174
| 0
| 0
| 0.146658
| 0.092985
| 0
| 0
| 0
| 0
| 0.081522
| 1
| 0.092391
| false
| 0
| 0.070652
| 0.027174
| 0.222826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e43577db4ce37b9708732914de0c5a01c24639dc
| 311
|
py
|
Python
|
ctf/post.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
ctf/post.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
ctf/post.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
import requests
def post():
url1 = "http://165.227.106.113/post.php"
headers1 = {
'Host': '165.227.106.113'
}
data = {
'username': 'admin',
'password': '71urlkufpsdnlkadsf'
}
r1 = requests.post(url=url1, data=data, headers=headers1)
print(r1.text)
post()
| 17.277778
| 61
| 0.559486
| 36
| 311
| 4.833333
| 0.638889
| 0.068966
| 0.103448
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140969
| 0.270096
| 311
| 17
| 62
| 18.294118
| 0.625551
| 0
| 0
| 0
| 0
| 0
| 0.286174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.076923
| 0.076923
| 0
| 0.153846
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e43fd711dcd86e63949520216ee91e975352e431
| 10,839
|
py
|
Python
|
esp8266/main.py
|
0xSebin/SwimTime.github.io
|
e2d997464d1f4a36783638c81307a775cdfa7fcd
|
[
"MIT"
] | 1
|
2021-03-28T16:24:23.000Z
|
2021-03-28T16:24:23.000Z
|
esp8266/main.py
|
5ebin-thomas/SwimTime.github.io
|
e2d997464d1f4a36783638c81307a775cdfa7fcd
|
[
"MIT"
] | null | null | null |
esp8266/main.py
|
5ebin-thomas/SwimTime.github.io
|
e2d997464d1f4a36783638c81307a775cdfa7fcd
|
[
"MIT"
] | 2
|
2018-02-15T17:27:34.000Z
|
2019-11-20T10:00:43.000Z
|
"""
Group -
SwimTime - Swim your way to success
"""
import ads1x15
import network
import time
import math
import machine
from umqtt.simple import MQTTClient
import micropython
from micropython import const
from machine import Pin
"""
Define constant values
"""
run = False
lapnr = 3 #default lap number
temp = 0.0
wifi_ssid = "Alfabeta"
wifi_pswd = "12345678"
server = "io.adafruit.com"
user = "kk2314"
passwd = "674d8794c84d49008c5e0092dc6be24b"
mqtt_temp = "kk2314/feeds/temp"
mqtt_time = "kk2314/feeds/time"
mqtt_rawdata = "kk2314/feeds/rawdata"
mqtt_control = "kk2314/feeds/control"
mqtt_stat = "kk2314/feeds/stat"
mqtt_debug = "kk2314/feeds/debug"
mqtt_tempalert = "kk2314/feeds/tempalert"
"""
Define pins for LED and buzzer
"""
red = Pin(0, Pin.OUT)
blue = Pin(2, Pin.OUT)
p12 = machine.Pin(12)
buzz = machine.PWM(p12)
#function to blink LED
def blink_LED(colour):
colour.off()
time.sleep_ms(50)
colour.on()
time.sleep_ms(50)
#setting up I2C for range finder/ set up ADC
i2c = machine.I2C(scl=machine.Pin(5), sda=machine.Pin(4), freq=100000)
adc = ads1x15.ADS1115(i2c)
adc.gain = 1 #ADS1015_REG_CONFIG_PGA_4_096V
#setting up I2C for temp sens
i2c_temp = machine.I2C(scl=machine.Pin(14), sda=machine.Pin(13), freq=100000)
#Received messages from subscriptions will be delivered to this callback
def sub_cb(topic, msg):
global state
global run
global lapnr
global temp
print((topic, msg))
#Check for messages only for the control topic
if topic == b"kk2314/feeds/control":
if msg == b"start":
run = True
elif msg.decode() == "temp":
get_temp()
payload_temp = "{}".format(temp)
c.publish(mqtt_temp,payload_temp)
print(temp)
else:
lapnr = int(msg)
print(lapnr)
"""
Connect to the wifi
"""
sta_if = network.WLAN(network.STA_IF)
sta_if.active(True)
sta_if.scan()
sta_if.connect(wifi_ssid, wifi_pswd)
print('Connecting to Wi-Fi')
#while connecting blink LED and wait
while not sta_if.isconnected():
blink_LED(red)
pass
print('Wifi connected')
#Turn red LED on (active-low)
red.off()
# Turn off ESP8266's AP
ap_if = network.WLAN(network.AP_IF)
ap_if.active(False)
#Converts the data received from ultrasonic sensor into meters
def convert(data):
global distance
distance = data/10000
distance = distance/0.000976562 #vcc/(1025*5)
distance = (distance/1000)+0.16 #distance offset
#Send a read request, read the temp sensor data and convert the reading into degrees Celsius
def get_temp():
global temp
i2c_temp.writeto(0x40, bytearray([0xf3]))
time.sleep(0.5)
data=i2c_temp.readfrom(0x40, 2)
tempraw=int.from_bytes(data, "big")
temp = 175.72 * tempraw / 65536
temp = temp - 46.85
#sets up the buzzer to run a countdown composed of 3 short beeps and a long one
def countdown():
count = 0
freq = 300
while count < 3:
buzz.freq(400)
buzz.duty(512)
time.sleep(0.7)
buzz.duty(1023)
time.sleep(0.7)
count = count + 1
buzz.freq(500)
buzz.duty(512)
time.sleep(1.25)
buzz.duty(1023)
#converts milliseconds into minutes and seconds
def format(sec):
sec = sec/1000
mins, secs = divmod( sec, 60)
secs = round(secs, 3)
return (mins, secs)
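# e.g. format(83500) returns (1.0, 23.5): 83500 ms is 1 min 23.5 s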
#main() function which executes sensing and mqtt push
def main(server):
global run
global lapnr
global nr
global c
global mqttConnected
"""
Defines which client to connect to.
Using the adafruit.io broker requires authentication
so we also set username and password
"""
c = MQTTClient("Sensor boards", server, user = user, password = passwd)
c.set_callback(sub_cb)
#sets flag for mqtt connected
if c.connect() == False:
mqttConnected = True
print('MQTT Connected')
#subscribe to the topic where controls are received
c.subscribe("kk2314/feeds/control")
while True:
if True:
c.wait_msg() #blocking check for message
#start timing laps
if run == True:
#reset the run flag
run = False
#do countdown
countdown()
c.publish(mqtt_debug,"Started countdown")
#start timer
start = time.ticks_ms()
c.publish(mqtt_debug,"Timer started")
print("go")
#wait for user to go away from sensor
time.sleep(5)
#resets statistical variables every beginning of run
lap_index = 0
best_lap= 0
avr_lap = 0
total_time= 0
worst_lap = 0
#main while loop which continues until lapnr goes to 0
while lapnr > 0:
blink_LED(blue)
data = adc.read(0)
convert(data)
#if sensor detects object within threshold it times a lap
if distance < 0.80:
lap_time_raw = time.ticks_diff(time.ticks_ms(), start)
#reset time measure
start = time.ticks_ms()
c.publish(mqtt_debug, "Lap end detected")
lap_index = lap_index + 1
total_time = total_time + lap_time_raw
#check if the lap is the slowest
if lap_time_raw > worst_lap:
worst_lap = lap_time_raw
worst_index = lap_index
#update average lap_time
avr_lap = total_time/lap_index
#check if lap is the fastest
if lap_index == 1:
best_lap = lap_time_raw
best_index = 1
elif lap_time_raw < best_lap:
best_lap = lap_time_raw
best_index = lap_index
#format all the statistical values in mins, secs
mins_av, secs_av = format(avr_lap)
mins_bs, secs_bs = format(best_lap)
mins_ws, secs_ws = format(worst_lap)
mins_to, secs_to = format(total_time)
mins, secs = format(lap_time_raw)
#read current temp
get_temp()
#send alert if temperature is outside ideal range
if temp > 21 and temp < 29:
c.publish(mqtt_tempalert, "Temperature is ideal for a splash, Happy Swimming!")
elif temp < 21:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too low)")
elif temp > 29:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too high)")
#encode all data to JSON - manually to save memory
payload_temp = "{}".format(temp)
payload = " Lap number {} was: {} m {} s. ".format( lap_index, mins, secs)
payload_raw = "{}".format(lap_time_raw/1000)
payload_stat_av = "Average lap time is : {} m {} s ".format(mins_av,secs_av)
payload_stat_bs = "Best lap was lap number {} : {} m {} s ".format(best_index,mins_bs,secs_bs)
payload_stat_ws = "Worst lap was lap number {} : {} m {} s ".format(worst_index,mins_ws,secs_ws)
payload_stat_to = "Total time is : {} m {} s ".format(mins_to,secs_to)
#publish converted and raw data to mqtt broker
c.publish(mqtt_time,payload)
c.publish(mqtt_rawdata, payload_raw)
c.publish(mqtt_temp,payload_temp)
c.publish(mqtt_stat,payload_stat_av)
c.publish(mqtt_stat,payload_stat_bs)
c.publish(mqtt_stat,payload_stat_ws)
c.publish(mqtt_stat,payload_stat_to)
c.publish(mqtt_debug, "Data published successfully")
lapnr = lapnr - 1
#wait for 10 sec for object to get out of range of sensor
if lapnr != 0:
time.sleep(10)
c.publish(mqtt_debug, "Done with current run") #debug messages
else:
c.check_msg() #non-blocking check for message
#start timing laps
if run == True:
#reset the run flag
run = False
#do countdown
countdown()
c.publish(mqtt_debug,"Started countdown")
#start timer
start = time.ticks_ms()
c.publish(mqtt_debug,"Timer started")
print("go")
#wait for user to go away from sensor
time.sleep(5)
#resets statistical variables every beginning of run
lap_index = 0
best_lap= 0
avr_lap = 0
total_time= 0
worst_lap = 0
#main while loop which continues until lapnr goes to 0
while lapnr > 0:
blink_LED(blue)
data = adc.read(0)
convert(data)
#if sensor detects object within threshold it times a lap
if distance < 0.80:
lap_time_raw = time.ticks_diff(time.ticks_ms(), start)
#reset time measure
start = time.ticks_ms()
c.publish(mqtt_debug, "Lap end detected")
lap_index = lap_index + 1
total_time = total_time + lap_time_raw
#check if the lap is the slowest
if lap_time_raw > worst_lap:
worst_lap = lap_time_raw
worst_index = lap_index
#update average lap_time
avr_lap = total_time/lap_index
#check if lap is the fastest
if lap_index == 1:
best_lap = lap_time_raw
best_index = 1
elif lap_time_raw < best_lap:
best_lap = lap_time_raw
best_index = lap_index
#format all the statistical values in mins, secs
mins_av, secs_av = format(avr_lap)
mins_bs, secs_bs = format(best_lap)
mins_ws, secs_ws = format(worst_lap)
mins_to, secs_to = format(total_time)
mins, secs = format(lap_time_raw)
#read current temp
get_temp()
#send alert if temperature is outside ideal range
if temp > 21 and temp < 29:
c.publish(mqtt_tempalert, "Temperature is ideal for a splash, Happy Swimming!")
elif temp < 21:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too low)")
elif temp > 29:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too high)")
#encode all data to JSON - manually to save memory
payload_temp = "{}".format(temp)
payload = " Lap number {} was: {} m {} s. ".format( lap_index, mins, secs)
payload_raw = "{}".format(lap_time_raw/1000)
payload_stat_av = "Average lap time is : {} m {} s ".format(mins_av,secs_av)
payload_stat_bs = "Best lap was lap number {} : {} m {} s ".format(best_index,mins_bs,secs_bs)
payload_stat_ws = "Worst lap was lap number {} : {} m {} s ".format(worst_index,mins_ws,secs_ws)
payload_stat_to = "Total time is : {} m {} s ".format(mins_to,secs_to)
#publish converted and raw data to mqtt broker
c.publish(mqtt_time,payload)
c.publish(mqtt_rawdata, payload_raw)
c.publish(mqtt_temp,payload_temp)
c.publish(mqtt_stat,payload_stat_av)
c.publish(mqtt_stat,payload_stat_bs)
c.publish(mqtt_stat,payload_stat_ws)
c.publish(mqtt_stat,payload_stat_to)
c.publish(mqtt_debug, "Data published successfully")
lapnr = lapnr - 1
#wait for 10 sec for object to get out of range of sensor
if lapnr != 0:
time.sleep(10)
c.publish(mqtt_debug, "Done with current run") #debug messages
c.disconnect()
if __name__ == "__main__":
main(server)
| 28.448819
| 110
| 0.658456
| 1,607
| 10,839
| 4.277536
| 0.194151
| 0.036078
| 0.054117
| 0.024731
| 0.628891
| 0.608816
| 0.604888
| 0.604888
| 0.604888
| 0.604888
| 0
| 0.035985
| 0.24624
| 10,839
| 381
| 111
| 28.448819
| 0.805386
| 0.203617
| 0
| 0.601626
| 0
| 0
| 0.149082
| 0.006482
| 0
| 0
| 0.00144
| 0
| 0
| 1
| 0.028455
| false
| 0.012195
| 0.036585
| 0
| 0.069106
| 0.03252
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e45a47a7a23107da9b1e4e894dbe004e6d56eaf1
| 2,933
|
py
|
Python
|
Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py
|
mrankitgupta/PythonLessons
|
119efc58518c5b35c6647009c74ff96728f851fa
|
[
"MIT"
] | null | null | null |
Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py
|
mrankitgupta/PythonLessons
|
119efc58518c5b35c6647009c74ff96728f851fa
|
[
"MIT"
] | null | null | null |
Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py
|
mrankitgupta/PythonLessons
|
119efc58518c5b35c6647009c74ff96728f851fa
|
[
"MIT"
] | null | null | null |
# defining a decorator
def hello_decorator(func):
# inner1 is a wrapper function in which the wrapped argument is called; the inner function can access the enclosing function's locals, such as "func" here
def inner1():
print("Hello, this is before function execution")
# calling the actual function now inside the wrapper function.
func()
print("This is after function execution")
return inner1
# defining a function, to be called inside wrapper
def function_to_be_used():
print("This is inside the function !!")
# passing 'function_to_be_used' inside the decorator to control its behaviour
function_to_be_used = hello_decorator(function_to_be_used)
# calling the function
function_to_be_used()
# find out the execution time of a function using a decorator
# importing libraries
import time
import math
# decorator to calculate the duration taken by any function
def calculate_time(func):
# inner1 takes *args and **kwargs, so any arguments of the wrapped function can be forwarded like this
def inner1(*args, **kwargs):
# storing time before function execution
begin = time.time()
func(*args, **kwargs)
# storing time after function execution
end = time.time()
print("Total time taken in : ", func.__name__, end - begin)
return inner1
# this decorator can be added to any function; here it is applied to a factorial calculation
@calculate_time
def factorial(num):
# sleep for 2 seconds; factorial itself takes very little time, so the sleep makes the measured difference visible
time.sleep(2)
print(math.factorial(num))
# calling the function.
factorial(10)
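# prints 3628800 and then something like "Total time taken in :  factorial 2.0..." because of the 2 second sleep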
# Chaining Decorators
# code for testing decorator chaining
def decor1(func):
def inner():
x = func()
return x * x
return inner
def decor(func):
def inner():
x = func()
return 2 * x
return inner
@decor1
@decor
def num():
return 10
print(num())
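# decor (the innermost decorator) doubles the result (2 * 10 = 20), then decor1 squares it, so this prints 400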
# Decorators with parameters in Python
def decorator(*args, **kwargs):
print("Inside decorator")
def inner(func):
# code functionality here
print("Inside inner function")
print("I am", kwargs['name'])
func()
# returning inner function
return inner
@decorator(name="Ankit Gupta")
def my_func():
print("Inside actual function")
# Generator Function
# A generator function that yields 1 the first time, 2 the second time and 3 the third time
def simpleGeneratorFun():
yield 1
yield 2
yield 3
# Driver code to check above generator function
for value in simpleGeneratorFun():
print(value)
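# the loop above prints 1, 2 and 3, one value per line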
# A Python program to demonstrate use of generator object with next()
# A generator function
def simpleGeneratorFun():
yield 1
yield 2
yield 3
# x is a generator object
x = simpleGeneratorFun()
# Iterating over the generator object using next
print(next(x)) # the next() built-in calls x.__next__() in Python 3
print(next(x))
print(next(x))
| 30.237113
| 144
| 0.691101
| 404
| 2,933
| 4.94802
| 0.299505
| 0.030015
| 0.036018
| 0.04002
| 0.062031
| 0.062031
| 0.03902
| 0.03902
| 0
| 0
| 0
| 0.011101
| 0.232185
| 2,933
| 96
| 145
| 30.552083
| 0.876554
| 0.455506
| 0
| 0.349206
| 0
| 0
| 0.137322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.253968
| false
| 0
| 0.031746
| 0.015873
| 0.412698
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|