| Column | Dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
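The sample rows below follow this schema. As a minimal sketch of how such rows could be inspected (assuming the split is available locally as a Parquet shard; the file name here is hypothetical, and the column names are taken from the schema above), pandas can load the table and filter on the quality signals:

```python
import pandas as pd

# Hypothetical shard name; substitute the actual Parquet/JSONL file for this split.
df = pd.read_parquet("code_quality_signals.parquet")

# Sanity-check a few dtypes against the schema table above.
print(df.dtypes[["hexsha", "size", "lang", "avg_line_length"]])

# Example filter: small Python files with mostly unique words and no duplicated 10-grams.
mask = (
    (df["lang"] == "Python")
    & (df["size"] < 10_000)
    & (df["qsc_code_frac_words_unique_quality_signal"] > 0.3)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] == 0.0)
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "max_stars_count"]].head())
```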
f296a031d5f0c54dcf0daafc3b2597cd41d7d8ee
| 524
|
py
|
Python
|
sharedData.py
|
vidalmatheus/DS.com
|
47b8d3cbb6d9ecd30178c4ba76408191c0715866
|
[
"MIT"
] | null | null | null |
sharedData.py
|
vidalmatheus/DS.com
|
47b8d3cbb6d9ecd30178c4ba76408191c0715866
|
[
"MIT"
] | null | null | null |
sharedData.py
|
vidalmatheus/DS.com
|
47b8d3cbb6d9ecd30178c4ba76408191c0715866
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template, request, redirect,Blueprint, json, url_for, session
from modules import dataBase,usuario
import psycopg2, os, subprocess, bcrypt
#
#def getData():
# DATABASE_URL = os.environ['DATABASE_URL']
# con = psycopg2.connect(DATABASE_URL, sslmode='require')
# return con
### connect to the dataBase
DATABASE_URL = os.environ['DATABASE_URL']
connectionData = dataBase.dataAccess()
####
###Usuario
usersDataOnline = usuario.acessManager()
#userData = usuario.acessoUser()
###
| 20.96
| 93
| 0.740458
| 60
| 524
| 6.35
| 0.583333
| 0.144357
| 0.068241
| 0.104987
| 0.16273
| 0.16273
| 0
| 0
| 0
| 0
| 0
| 0.004415
| 0.135496
| 524
| 24
| 94
| 21.833333
| 0.836645
| 0.370229
| 0
| 0
| 0
| 0
| 0.038961
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
f2995fcdd8762cd23c69c1f140cd16f1c0b58140
| 6,183
|
py
|
Python
|
merlin/analysis/sequential.py
|
greentea1079/MERlin
|
f4c50cb15722263ee9397561b9ce4b2eddc3d559
|
[
"MIT"
] | 14
|
2019-08-19T15:26:44.000Z
|
2022-01-12T16:38:42.000Z
|
merlin/analysis/sequential.py
|
greentea1079/MERlin
|
f4c50cb15722263ee9397561b9ce4b2eddc3d559
|
[
"MIT"
] | 60
|
2019-08-19T15:48:37.000Z
|
2021-11-11T19:19:18.000Z
|
merlin/analysis/sequential.py
|
epigen-UCSD/MERlin
|
3aa784fb28a2a4ebae92cfaf3a72f30a459daab9
|
[
"MIT"
] | 13
|
2019-08-16T06:03:23.000Z
|
2021-08-02T15:52:46.000Z
|
import pandas
import rtree
import networkx
import numpy as np
import cv2
from skimage.measure import regionprops
from merlin.core import analysistask
from merlin.util import imagefilters
class SumSignal(analysistask.ParallelAnalysisTask):
"""
An analysis task that calculates the signal intensity within the boundaries
of a cell for all rounds not used in the codebook, useful for measuring
RNA species that were stained individually.
"""
def __init__(self, dataSet, parameters=None, analysisName=None):
super().__init__(dataSet, parameters, analysisName)
if 'apply_highpass' not in self.parameters:
self.parameters['apply_highpass'] = False
if 'highpass_sigma' not in self.parameters:
self.parameters['highpass_sigma'] = 5
if 'z_index' not in self.parameters:
self.parameters['z_index'] = 0
if self.parameters['z_index'] >= len(self.dataSet.get_z_positions()):
raise analysistask.InvalidParameterException(
'Invalid z_index specified for %s. (%i > %i)'
% (self.analysisName, self.parameters['z_index'],
len(self.dataSet.get_z_positions())))
self.highpass = str(self.parameters['apply_highpass']).upper() == 'TRUE'
self.alignTask = self.dataSet.load_analysis_task(
self.parameters['global_align_task'])
def fragment_count(self):
return len(self.dataSet.get_fovs())
def get_estimated_memory(self):
return 2048
def get_estimated_time(self):
return 1
def get_dependencies(self):
return [self.parameters['warp_task'],
self.parameters['segment_task'],
self.parameters['global_align_task']]
def _extract_signal(self, cells, inputImage, zIndex) -> pandas.DataFrame:
cellCoords = []
for cell in cells:
regions = cell.get_boundaries()[zIndex]
if len(regions) == 0:
cellCoords.append([])
else:
pixels = []
for region in regions:
coords = region.exterior.coords.xy
xyZip = list(zip(coords[0].tolist(), coords[1].tolist()))
pixels.append(np.array(
self.alignTask.global_coordinates_to_fov(
cell.get_fov(), xyZip)))
cellCoords.append(pixels)
cellIDs = [str(cells[x].get_feature_id()) for x in range(len(cells))]
mask = np.zeros(inputImage.shape, np.uint8)
for i, cell in enumerate(cellCoords):
cv2.drawContours(mask, cell, -1, i+1, -1)
propsDict = {x.label: x for x in regionprops(mask, inputImage)}
propsOut = pandas.DataFrame(
data=[(propsDict[k].intensity_image.sum(),
propsDict[k].filled_area)
if k in propsDict else (0, 0)
for k in range(1, len(cellCoords) + 1)],
index=cellIDs,
columns=['Intensity', 'Pixels'])
return propsOut
def _get_sum_signal(self, fov, channels, zIndex):
fTask = self.dataSet.load_analysis_task(self.parameters['warp_task'])
sTask = self.dataSet.load_analysis_task(self.parameters['segment_task'])
cells = sTask.get_feature_database().read_features(fov)
signals = []
for ch in channels:
img = fTask.get_aligned_image(fov, ch, zIndex)
if self.highpass:
highPassSigma = self.parameters['highpass_sigma']
highPassFilterSize = int(2 * np.ceil(3 * highPassSigma) + 1)
img = imagefilters.high_pass_filter(img,
highPassFilterSize,
highPassSigma)
signals.append(self._extract_signal(cells, img,
zIndex).iloc[:, [0]])
# adding num of pixels
signals.append(self._extract_signal(cells, img, zIndex).iloc[:, [1]])
compiledSignal = pandas.concat(signals, 1)
compiledSignal.columns = channels+['Pixels']
return compiledSignal
def get_sum_signals(self, fov: int = None) -> pandas.DataFrame:
"""Retrieve the sum signals calculated from this analysis task.
Args:
fov: the fov to get the sum signals for. If not specified, the
sum signals for all fovs are returned.
Returns:
A pandas data frame containing the sum signal information.
"""
if fov is None:
return pandas.concat(
[self.get_sum_signals(fov) for fov in self.dataSet.get_fovs()]
)
return self.dataSet.load_dataframe_from_csv(
'sequential_signal', self.get_analysis_name(),
fov, 'signals', index_col=0)
def _run_analysis(self, fragmentIndex):
zIndex = int(self.parameters['z_index'])
channels, geneNames = self.dataSet.get_data_organization()\
.get_sequential_rounds()
fovSignal = self._get_sum_signal(fragmentIndex, channels, zIndex)
normSignal = fovSignal.iloc[:, :-1].div(fovSignal.loc[:, 'Pixels'], 0)
normSignal.columns = geneNames
self.dataSet.save_dataframe_to_csv(
normSignal, 'sequential_signal', self.get_analysis_name(),
fragmentIndex, 'signals')
class ExportSumSignals(analysistask.AnalysisTask):
def __init__(self, dataSet, parameters=None, analysisName=None):
super().__init__(dataSet, parameters, analysisName)
def get_estimated_memory(self):
return 2048
def get_estimated_time(self):
return 5
def get_dependencies(self):
return [self.parameters['sequential_task']]
def _run_analysis(self):
sTask = self.dataSet.load_analysis_task(
self.parameters['sequential_task'])
signals = sTask.get_sum_signals()
self.dataSet.save_dataframe_to_csv(
signals, 'sequential_sum_signals',
self.get_analysis_name())
| 37.472727
| 80
| 0.603105
| 673
| 6,183
| 5.355126
| 0.273403
| 0.073807
| 0.029967
| 0.022198
| 0.295505
| 0.285516
| 0.222531
| 0.161487
| 0.13596
| 0.109323
| 0
| 0.00784
| 0.298561
| 6,183
| 164
| 81
| 37.70122
| 0.82315
| 0.075691
| 0
| 0.12069
| 0
| 0
| 0.06546
| 0.003903
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12069
| false
| 0.094828
| 0.068966
| 0.060345
| 0.301724
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
f2a5d347767b990fa97d063da0ee6a2aa890bd9d
| 2,757
|
py
|
Python
|
run.py
|
mishel254/py-password-locker
|
c14dd314251f078125df39104b99384c8cbd292b
|
[
"MIT"
] | null | null | null |
run.py
|
mishel254/py-password-locker
|
c14dd314251f078125df39104b99384c8cbd292b
|
[
"MIT"
] | null | null | null |
run.py
|
mishel254/py-password-locker
|
c14dd314251f078125df39104b99384c8cbd292b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3.8
from passwords import Credentials
from login import accounts
import random
#create credentials func
def create(fname,lname,user,passwords,email):
newCredentials = Credentials(fname,lname,user,passwords,email)
return newCredentials
#delete
'''
function to delete credentials & accounts
'''
def delete(credentials):
credentials.delete()
def deleteAccount(accounts):
accounts.deleteAccount()
'''
save credentials & accounts
'''
def saveCredentials(Credentials):
Credentials.saveCredentials()
def saveAccounts(accounts):
accounts.saveAccounts()
'''
search credentials
'''
def auth_user(email):
return Credentials.auth_by_email
'''
check if contact exist
'''
def account_exists(email):
return Credentials.accounts_display(email)
'''
display
'''
def display_all_users():
return accounts.display_all_users
def main():
print('Your name?')
username = input()
code = input(f'Press Enter {username}')
while True:
print("Use these short codes :")
print("cc - create a new contact")
print("dc - display contacts")
print("fc -find a contact")
print("ex -exit the contact list")
print("del-to delete ")
short_code = input()
if short_code == 'cc':
print('First name:')
fname = input()
print('last name:')
lname = input()
print('username')
username = input()
print('email:')
email = input()
print('password:')
passwords = input(round(random.random()))
saveCredentials(create(fname,lname,username,passwords,email))
print(f'Your data has been taken d{fname}')
elif short_code == 'dc':
if display_all_users():
print('users:')
for account in display_all_users():
print(f'{account.username} {account.email}')
elif short_code == 'fc':
print('enter email address to search')
search = input()
if accounts_exists(email):
auth_by_email = find_contact(search)
print(f'{search.first}')
else:
print('NO credentials found!')
elif short_code == 'del':
print('input Y confirm')
confirm = input()
if delete():
credentials.delete()
print('credential deleted')
else:
print('credentials not found')
elif short_code == 'ex':
print('happy coding!')
break
else:
print('Not an existing shortcut')
if __name__ == '__main__':
main()
| 22.056
| 73
| 0.573087
| 283
| 2,757
| 5.473498
| 0.335689
| 0.034861
| 0.038735
| 0.029697
| 0.036152
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001057
| 0.313384
| 2,757
| 124
| 74
| 22.233871
| 0.817221
| 0.018861
| 0
| 0.09589
| 0
| 0
| 0.176817
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.123288
| false
| 0.082192
| 0.041096
| 0.041096
| 0.219178
| 0.315068
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
f2ab54aefe1c397702c020ba41c25aedb91b9d9b
| 555
|
py
|
Python
|
setup.py
|
akvatol/CosmOrc
|
6ee1e1f3521a6d2b4c8eec104fa4e93db32d9352
|
[
"MIT"
] | 1
|
2018-12-07T17:21:39.000Z
|
2018-12-07T17:21:39.000Z
|
setup.py
|
akvatol/CosmOrc
|
6ee1e1f3521a6d2b4c8eec104fa4e93db32d9352
|
[
"MIT"
] | 8
|
2018-11-23T10:05:01.000Z
|
2019-04-09T19:17:43.000Z
|
setup.py
|
akvatol/CosmOrc
|
6ee1e1f3521a6d2b4c8eec104fa4e93db32d9352
|
[
"MIT"
] | 1
|
2018-12-07T17:21:40.000Z
|
2018-12-07T17:21:40.000Z
|
from setuptools import setup, find_packages
setup(
name='CosmOrc',
version='0.1',
include_package_data=True,
packages=find_packages(),
python_requires='>=3.6',
install_requires=[
'Click==7.0',
'numpy==1.16.2',
'pandas==0.24.2',
'pyaml==19.4.1',
'PySnooper==0.2.8',
'python-dateutil==2.8.0',
'pytz==2019.3',
'PyYAML==5.1.2',
'six==1.12.0',
'typing==3.7.4.1',
],
entry_points='''
[console_scripts]
CosmOrc = main:cli
''',
)
| 21.346154
| 43
| 0.506306
| 72
| 555
| 3.791667
| 0.638889
| 0.087912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 0.293694
| 555
| 25
| 44
| 22.2
| 0.594388
| 0
| 0
| 0
| 0
| 0
| 0.381982
| 0.03964
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f2bbbde7ac14cbda28bc8fe761c19a1e71889708
| 2,808
|
py
|
Python
|
pfrock/cli/config_parser.py
|
knightliao/pfrock
|
33587f11caeeccc11d0b8219b4e02df153905486
|
[
"Apache-2.0"
] | 62
|
2016-02-24T10:47:17.000Z
|
2019-04-27T01:36:56.000Z
|
pfrock/cli/config_parser.py
|
knightliao/pfrock
|
33587f11caeeccc11d0b8219b4e02df153905486
|
[
"Apache-2.0"
] | 1
|
2019-04-19T12:13:21.000Z
|
2021-08-10T09:16:09.000Z
|
pfrock/cli/config_parser.py
|
knightliao/pfrock
|
33587f11caeeccc11d0b8219b4e02df153905486
|
[
"Apache-2.0"
] | 24
|
2016-03-01T14:59:29.000Z
|
2019-09-02T08:12:00.000Z
|
# !/usr/bin/env python
# coding=utf8
import json
import traceback
from tornado.web import RequestHandler
from pfrock.cli import logger
from pfrock.core.constants import PFROCK_CONFIG_SERVER, PFROCK_CONFIG_ROUTER, PFROCK_CONFIG_PORT, ROUTER_METHOD, \
ROUTER_PATH, ROUTER_OPTIONS, ROUTER_HANDLER
from pfrock.core.lib import auto_str
@auto_str
class PfrockConfigRouter(object):
SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS
def __init__(self, path, methods, handler, options={}):
self.path = path
self.handler = handler
self.options = options
self.methods = []
if methods == "any":
self.methods = []
else:
for method in methods:
method = method.upper()
if method in self.SUPPORTED_METHODS:
self.methods.append(method)
@auto_str
class PfrockConfigServer(object):
def __init__(self, routes, port):
self.routes = routes
self.port = port
class PfrockConfigParser(object):
@classmethod
def _parse_router(cls, router):
path = router[ROUTER_PATH] if ROUTER_PATH in router else None
methods = router[ROUTER_METHOD] if ROUTER_METHOD in router else []
handler = router[ROUTER_HANDLER] if ROUTER_HANDLER in router else None
options = router[ROUTER_OPTIONS] if ROUTER_OPTIONS in router else None
if path and handler:
return PfrockConfigRouter(path, methods, handler, options)
return None
@classmethod
def _parse_routers(cls, routers):
router_list = []
for router in routers:
router = cls._parse_router(router)
if router:
router_list.append(router)
return router_list
@classmethod
def _parse_servers(cls, server):
port = server[PFROCK_CONFIG_PORT] if PFROCK_CONFIG_PORT in server else None
routers = cls._parse_routers(server[PFROCK_CONFIG_ROUTER]) if PFROCK_CONFIG_ROUTER in server else None
if port and routers:
return PfrockConfigServer(routers, port)
@classmethod
def do(cls, config_file_path):
with open(config_file_path, 'r') as fin:
try:
config_data = json.load(fin)
except:
logger.error("%s not well formed \n%s" % (config_file_path, traceback.format_exc()))
return None
config_servers = config_data[PFROCK_CONFIG_SERVER] if PFROCK_CONFIG_SERVER in config_data else None
if config_servers:
for config_server in config_servers:
config_server = cls._parse_servers(config_server)
# todo: dev version just support one server
return config_server
return None
| 33.428571
| 114
| 0.649217
| 331
| 2,808
| 5.277946
| 0.244713
| 0.06182
| 0.027476
| 0.027476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000498
| 0.284544
| 2,808
| 83
| 115
| 33.831325
| 0.869089
| 0.026353
| 0
| 0.169231
| 0
| 0
| 0.00989
| 0
| 0
| 0
| 0
| 0.012048
| 0
| 1
| 0.092308
| false
| 0
| 0.092308
| 0
| 0.353846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
f2ca33e35faaa3a6ab066c758e3c492f242feea7
| 633
|
py
|
Python
|
lesson_3_set.py
|
pis2pis2/pis2pis2
|
a8ab83d89bbeaa2b4a6a2be684ae5b7513472a7f
|
[
"MIT"
] | null | null | null |
lesson_3_set.py
|
pis2pis2/pis2pis2
|
a8ab83d89bbeaa2b4a6a2be684ae5b7513472a7f
|
[
"MIT"
] | null | null | null |
lesson_3_set.py
|
pis2pis2/pis2pis2
|
a8ab83d89bbeaa2b4a6a2be684ae5b7513472a7f
|
[
"MIT"
] | 4
|
2019-11-12T06:59:35.000Z
|
2021-01-29T21:34:15.000Z
|
# Тип данных МНОЖЕСТВО (set)------------------------
#------------------------------------------
# Инициализация
temp_set = {1,2,3}
print(type(temp_set), temp_set)
temp_list = [1,2,1,2,2,3,4,12,32]
temp_set = set(temp_list)
print(type(temp_set), temp_set)
# Обращения к элементам множества
print(100 in temp_set)
for element in temp_set:
print(element)
# Функции с множествами
#----------
# Операции с множествами
# Методы
my_set_1 = set([1, 2, 3, 4, 5])
my_set_2 = set([5, 6, 7, 8, 9])
my_set_3 = my_set_1.union(my_set_2)
print(my_set_3)
my_set_4 = my_set_1.difference(my_set_2)
print(my_set_4)
| 20.419355
| 52
| 0.598736
| 107
| 633
| 3.261682
| 0.336449
| 0.143266
| 0.094556
| 0.034384
| 0.272206
| 0.223496
| 0
| 0
| 0
| 0
| 0
| 0.069943
| 0.164297
| 633
| 31
| 53
| 20.419355
| 0.589792
| 0.315956
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.428571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
4b3d9db6b58f7211471a5f7c96ec4eb5f14b1e04
| 958
|
py
|
Python
|
backend/app/exceptions/exceptions.py
|
Michal-Miko/competitive-teams
|
6bb55542e06121f413248ddf0b75285296b610bb
|
[
"MIT"
] | null | null | null |
backend/app/exceptions/exceptions.py
|
Michal-Miko/competitive-teams
|
6bb55542e06121f413248ddf0b75285296b610bb
|
[
"MIT"
] | null | null | null |
backend/app/exceptions/exceptions.py
|
Michal-Miko/competitive-teams
|
6bb55542e06121f413248ddf0b75285296b610bb
|
[
"MIT"
] | null | null | null |
from app.database import crud
from fastapi import HTTPException, status
def check_for_team_existence(db, team_id):
if crud.get_team(db, team_id=team_id) is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Team {} not found".format(team_id))
def check_for_player_existence(db, player_id):
if crud.get_player(db, player_id=player_id) is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Player {} not found".format(player_id))
def check_for_match_existence(db, match_id):
if crud.get_match(db, match_id=match_id) is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Match {} not found".format(match_id))
def check_for_tournament_existence(db, tournament_id):
if crud.get_tournament(db, tournament_id=tournament_id) is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Match {} not found".format(tournament_id))
| 41.652174
| 117
| 0.775574
| 150
| 958
| 4.633333
| 0.2
| 0.092086
| 0.063309
| 0.063309
| 0.417266
| 0.417266
| 0.417266
| 0.417266
| 0.417266
| 0.417266
| 0
| 0.014337
| 0.126305
| 958
| 22
| 118
| 43.545455
| 0.81601
| 0
| 0
| 0
| 0
| 0
| 0.075157
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b48a1ce648ccd7eddf1077ee9304a100d815be4
| 581
|
py
|
Python
|
day6_10/day6b.py
|
invincible-akshay/advent_of_code2020
|
81f207c6f7218ff235c31d67e1b4659cc482297c
|
[
"MIT"
] | null | null | null |
day6_10/day6b.py
|
invincible-akshay/advent_of_code2020
|
81f207c6f7218ff235c31d67e1b4659cc482297c
|
[
"MIT"
] | null | null | null |
day6_10/day6b.py
|
invincible-akshay/advent_of_code2020
|
81f207c6f7218ff235c31d67e1b4659cc482297c
|
[
"MIT"
] | null | null | null |
import utils.fileutils as futils
inp = futils.read_list("../data/day6.txt")
nums_dict = dict()
group_size, res_count = 0, 0
for line in inp:
if line == "":
# res_count += len(nums_set)
for k, v in nums_dict.items():
if v == group_size:
res_count += 1
nums_dict = dict()
group_size = 0
continue
group_size += 1
for ch in line:
nums_dict[ch] = 1 + nums_dict.get(ch, 0)
for k, v in nums_dict.items():
if v == group_size:
res_count += 1
print("Sum of counts: {0}".format(res_count))
| 25.26087
| 48
| 0.567986
| 91
| 581
| 3.428571
| 0.395604
| 0.153846
| 0.115385
| 0.163462
| 0.397436
| 0.262821
| 0.262821
| 0.262821
| 0.262821
| 0.262821
| 0
| 0.024752
| 0.304647
| 581
| 22
| 49
| 26.409091
| 0.747525
| 0.04475
| 0
| 0.421053
| 0
| 0
| 0.061483
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.052632
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b4dceb98d231438a803f497f8f31de32f299051
| 241
|
py
|
Python
|
sps_demo/accounts/api/serializers.py
|
JuanDM93/sps_django
|
df47c7ee63a1e99468644a6f428a6cdabc7ac6ae
|
[
"MIT"
] | null | null | null |
sps_demo/accounts/api/serializers.py
|
JuanDM93/sps_django
|
df47c7ee63a1e99468644a6f428a6cdabc7ac6ae
|
[
"MIT"
] | 1
|
2021-07-27T06:46:05.000Z
|
2021-07-27T06:46:05.000Z
|
sps_demo/accounts/api/serializers.py
|
JuanDM93/sps_django
|
df47c7ee63a1e99468644a6f428a6cdabc7ac6ae
|
[
"MIT"
] | null | null | null |
from rest_framework.serializers import ModelSerializer
from accounts.models import Account
class AccountSerializer(ModelSerializer):
class Meta:
model = Account
fields = [
'account_id', 'limit',
]
| 21.909091
| 54
| 0.6639
| 22
| 241
| 7.181818
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26971
| 241
| 10
| 55
| 24.1
| 0.897727
| 0
| 0
| 0
| 0
| 0
| 0.062241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b53328e075db009dbb8d21c3c121da0a2ce955a
| 476
|
py
|
Python
|
qcodes/instrument_drivers/rohde_schwarz/HMC8042.py
|
LGruenhaupt/Qcodes
|
ffb74dae53c13c4885e61b5a2df3f833d524de04
|
[
"MIT"
] | 1
|
2019-12-07T01:25:49.000Z
|
2019-12-07T01:25:49.000Z
|
qcodes/instrument_drivers/rohde_schwarz/HMC8042.py
|
Dominik-Vogel/Qcodes
|
b4cf7d58bc1bf3be97af6bf48f57cb6b87d588bb
|
[
"MIT"
] | 12
|
2020-10-13T16:53:37.000Z
|
2020-10-14T17:16:22.000Z
|
qcodes/instrument_drivers/rohde_schwarz/HMC8042.py
|
Dominik-Vogel/Qcodes
|
b4cf7d58bc1bf3be97af6bf48f57cb6b87d588bb
|
[
"MIT"
] | 1
|
2020-05-03T22:47:40.000Z
|
2020-05-03T22:47:40.000Z
|
from .private.HMC804x import _RohdeSchwarzHMC804x
from qcodes.utils.deprecate import deprecate_moved_to_qcd
@deprecate_moved_to_qcd(alternative="qcodes_contrib_drivers.drivers.RohdeSchwarz.HMC8042.RohdeSchwarzHMC8042")
class RohdeSchwarzHMC8042(_RohdeSchwarzHMC804x):
"""
This is the qcodes driver for the Rohde & Schwarz HMC8042 Power Supply
"""
def __init__(self, name, address, **kwargs):
super().__init__(name, address, num_channels=2, **kwargs)
| 39.666667
| 110
| 0.781513
| 55
| 476
| 6.418182
| 0.672727
| 0.07932
| 0.090652
| 0.107649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062651
| 0.128151
| 476
| 11
| 111
| 43.272727
| 0.787952
| 0.147059
| 0
| 0
| 0
| 0
| 0.182051
| 0.182051
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
4b5c74257ca507c7289c021413a4bdff6ed7d6a6
| 2,865
|
py
|
Python
|
Python/061.py
|
jaimeliew1/Project_Euler_Solutions
|
963c9c6d6571cade8f87341f97a6a2cd1af202bb
|
[
"MIT"
] | null | null | null |
Python/061.py
|
jaimeliew1/Project_Euler_Solutions
|
963c9c6d6571cade8f87341f97a6a2cd1af202bb
|
[
"MIT"
] | 1
|
2018-04-16T21:01:50.000Z
|
2018-04-16T21:01:50.000Z
|
Python/061.py
|
jaimeliew1/Project_Euler_Solutions
|
963c9c6d6571cade8f87341f97a6a2cd1af202bb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Solution to Project Euler problem X
Author: Jaime Liew
https://github.com/jaimeliew1/Project_Euler_Solutions
"""
import math
def isTri(n):
return ((math.sqrt(1+8*n)-1)/2).is_integer()
def isSqr(n):
return (math.sqrt(n)).is_integer()
def isPent(n):
return ((1+math.sqrt(1+24*n))/6).is_integer()
def isHex(n):
return ((1+math.sqrt(1+8*n))/4).is_integer()
def isHept(n):
return ((3+math.sqrt(9+40*n))/10).is_integer()
def isOct(n):
return ((2+math.sqrt(4+12*n))/6).is_integer()
isPoly = [isTri, isSqr, isPent, isHex,isHept,isOct]
class Jnum:
id = 0 #each nth bit is 1 if it is an nGon number
n = 0
isMultiPoly = False
def __init__(self, num):
self.n = num
for i in (f(num) for f in isPoly):
self.id = (self.id << 1) | i
if bin(self.id).count('1') > 1:
self.isMultiPoly = True
def __eq__(self,other):
return self.n == other.n
def __ne__(self,other):
return self.n != other.n
def checkThisSet(thisSet,depth,maxDepth, numSet):
for q in (q for q in numSet if q not in thisSet):
workingBit = 0
qIsCandidate = True
if str(thisSet[-1].n)[2:] == str(q.n)[:2]: #if cyclical
workingBit = 0
for i in (thisSet + [q]):
if workingBit & (i.id) == 0:
workingBit |= (i.id)
else:
qIsCandidate = False
break
else:
qIsCandidate = False
if qIsCandidate:
if depth == maxDepth-1:
if str(thisSet[0].n)[:2] == str(q.n)[2:]: #if cyclical back to start
return list(thisSet + [q])
else:
return [Jnum(0)]
furtherTesting = checkThisSet(list(thisSet + [q]),depth +1, maxDepth, numSet)
if furtherTesting != [Jnum(0)]:
return furtherTesting
return [Jnum(0)]
def run():
### generate set of possible candidates
numSet = []
for i in range(1000, 10000):
a = Jnum(i)
if a.id != 0:
if a.isMultiPoly:
temp = a
for k, bit in enumerate(bin(a.id)[2:].zfill(6)[::-1]):
if bit == '1':
temp.id = 1<<k
numSet.append(Jnum(a.n))
numSet[-1].id = 1<<k
else:
numSet.append(a)
#print("there are ",len(numSet)," candidate numbers.\n")
### Recursive search loop
for i in numSet:
currentSet = checkThisSet(list([i]), 1, 6, numSet)
if currentSet != [Jnum(0)]:
break
Sum = 0
for i in currentSet:
#print(i.n, bin(i.id)[2:].zfill(6))
Sum += i.n
return Sum
if __name__ == "__main__":
print(run())
| 25.131579
| 89
| 0.506806
| 386
| 2,865
| 3.689119
| 0.295337
| 0.03441
| 0.042135
| 0.021067
| 0.099017
| 0.089888
| 0.066011
| 0.066011
| 0
| 0
| 0
| 0.036938
| 0.347993
| 2,865
| 113
| 90
| 25.353982
| 0.725375
| 0.124258
| 0
| 0.157895
| 0
| 0
| 0.004018
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.144737
| false
| 0
| 0.013158
| 0.105263
| 0.381579
| 0.013158
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
4b624ab13f54c8cfd7032b48000491920f6d9a27
| 5,581
|
py
|
Python
|
web_spider/web_spider/pipelines.py
|
syun0216/simple_ci
|
83d31cb04357fe0bd428ab8f09c2db81a06eb723
|
[
"MIT"
] | null | null | null |
web_spider/web_spider/pipelines.py
|
syun0216/simple_ci
|
83d31cb04357fe0bd428ab8f09c2db81a06eb723
|
[
"MIT"
] | null | null | null |
web_spider/web_spider/pipelines.py
|
syun0216/simple_ci
|
83d31cb04357fe0bd428ab8f09c2db81a06eb723
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
class WebSpiderPipeline(object):
def init_insert_db(self,key,table_name):
pass
def process_item(self, item, spider):
# print(item['name'])
connection = pymysql.connect(host='127.0.0.1',
user='root',
password='123456',
db='mydb',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
if item['type'] == 'toutiao':
insert_sql = """INSERT INTO `dongqiudi` (`id`, `name`,`url`,`time`,`comment`,`image`)
VALUES (%s, %s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
id=VALUES(id),
name=VALUES(name),
url=VALUES (url),
time=VALUES (time),
comment=VALUES (comment),
image=VALUES (image)"""
cursor.execute(insert_sql, (item['id'], item['name'], item['url'], item['time'], item['comment'], item['image']))
elif item['type'] == 'rank':
insert_sql = """INSERT INTO `rank` (`rank`,`team_avatar`,`team_name`,`round`,`win`,`draw`,`lost`,`goal`,`fumble`,`GD`,`integral`,`rel`,`rel_name`)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
rank=VALUES (rank),
team_avatar=VALUES (team_avatar),
team_name=VALUES (team_name),
round=VALUES (round),
win=VALUES (win),
draw=VALUES (draw),
lost=VALUES (lost),
goal=VALUES (goal),
fumble=VALUES (fumble),
GD=VALUES (GD),
integral=VALUES (integral),
rel=VALUES (rel),
rel_name=VALUES (rel_name)
"""
cursor.execute(insert_sql,
(item['rank'], item['team_avatar'], item['team_name'], item['round'], item['win'], item['draw'],item['lost'],item['goal'],item['fumble'],item['GD'],item['integral'],item['rel'],item['rel_name']))
elif item['type'] == 'goal':
insert_sql = """INSERT INTO `player_goal_rank` (`rank`,`data`,`player_avatar`,`player_name`,`team_avatar`,`team_name`,`rel`,`rel_name`)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
rank=VALUES (rank),
data=VALUES (data),
player_avatar=VALUES (player_avatar),
player_name=VALUES (player_name),
team_avatar=VALUES (team_avatar),
team_name=VALUES (team_name),
rel=VALUES (rel),
rel_name=VALUES (rel_name)
"""
cursor.execute(insert_sql,
(item['rank'], item['data'], item['player_avatar'], item['player_name'],item['team_avatar'], item['team_name'], item['rel'], item['rel_name']))
elif item['type'] == 'assist':
insert_sql = """INSERT INTO `player_assist_rank` (`rank`,`data`,`player_avatar`,`player_name`,`team_avatar`,`team_name`,`rel`,`rel_name`)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
rank=VALUES (rank),
data=VALUES (data),
player_avatar=VALUES (player_avatar),
player_name=VALUES (player_name),
team_avatar=VALUES (team_avatar),
team_name=VALUES (team_name),
rel=VALUES (rel),
rel_name=VALUES (rel_name)
"""
cursor.execute(insert_sql,
(item['rank'], item['data'], item['player_avatar'], item['player_name'],item['team_avatar'], item['team_name'], item['rel'], item['rel_name']))
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
pass
| 52.650943
| 226
| 0.392403
| 477
| 5,581
| 4.446541
| 0.234801
| 0.029231
| 0.03819
| 0.043376
| 0.517209
| 0.481377
| 0.481377
| 0.466289
| 0.446488
| 0.446488
| 0
| 0.005228
| 0.485934
| 5,581
| 105
| 227
| 53.152381
| 0.734054
| 0.054291
| 0
| 0.474359
| 0
| 0.051282
| 0.688686
| 0.078778
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0.038462
| 0.012821
| 0
| 0.051282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b6b2b2466b7f50264d915b0b9ab9925c879719e
| 587
|
py
|
Python
|
kora/install/blender.py
|
wannaphong/kora
|
8a9034097d07b14094e077769c02a0b4857d179b
|
[
"MIT"
] | 91
|
2020-05-26T05:54:51.000Z
|
2022-03-09T07:33:44.000Z
|
kora/install/blender.py
|
wannaphong/kora
|
8a9034097d07b14094e077769c02a0b4857d179b
|
[
"MIT"
] | 12
|
2020-10-03T10:09:11.000Z
|
2021-03-06T23:12:21.000Z
|
kora/install/blender.py
|
wannaphong/kora
|
8a9034097d07b14094e077769c02a0b4857d179b
|
[
"MIT"
] | 16
|
2020-07-07T18:39:29.000Z
|
2021-03-06T03:46:49.000Z
|
import os
from IPython import get_ipython
# need this fix first
os.environ["LD_PRELOAD"] = ""
os.system("apt remove libtcmalloc-minimal4")
os.system("apt install libtcmalloc-minimal4")
os.environ["LD_PRELOAD"] = "/usr/lib/x86_64-linux-gnu/libtcmalloc_minimal.so.4.3.0"
os.system("dpkg -L libtcmalloc-minimal4")
# then install blender
url = "https://download.blender.org/release/Blender2.83/blender-2.83.0-linux64.tar.xz"
os.system(f"curl {url} | tar xJ")
os.system("ln -s /content/blender-2.83.0-linux64/blender /usr/local/bin/blender")
# show result
get_ipython().system("blender -v")
| 36.6875
| 86
| 0.749574
| 96
| 587
| 4.520833
| 0.572917
| 0.092166
| 0.050691
| 0.082949
| 0.082949
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046382
| 0.081772
| 587
| 16
| 87
| 36.6875
| 0.758813
| 0.088586
| 0
| 0
| 0
| 0.181818
| 0.639098
| 0.216165
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.181818
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b7d6c918015930582e1fb1d514d24f1d777be05
| 1,411
|
py
|
Python
|
molecool_test/tests/test_molecule.py
|
radifar/molecool_test
|
9e0027656d6f68d2efd9cdf8f24872b4bcea6cb9
|
[
"BSD-3-Clause"
] | null | null | null |
molecool_test/tests/test_molecule.py
|
radifar/molecool_test
|
9e0027656d6f68d2efd9cdf8f24872b4bcea6cb9
|
[
"BSD-3-Clause"
] | null | null | null |
molecool_test/tests/test_molecule.py
|
radifar/molecool_test
|
9e0027656d6f68d2efd9cdf8f24872b4bcea6cb9
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pytest
import molecool_test
@pytest.fixture
def methane_molecule():
symbols = np.array(['C', 'H', 'H', 'H', 'H'])
coordinates = np.array([
[1, 1, 1],
[2.4, 1, 1],
[-0.4, 1, 1],
[1, 1, 2.4],
[1, 1, -0.4],
])
return symbols, coordinates
def test_move_methane(methane_molecule):
symbols, coordinates = methane_molecule
coordinates[0] += 5
def test_build_bond_list(methane_molecule):
symbols, coordinates = methane_molecule
bonds = molecool_test.build_bond_list(coordinates)
assert len(bonds) == 4
for bond_length in bonds.values():
assert bond_length == 1.4
def test_build_bond_failure(methane_molecule):
symbols, coordinates = methane_molecule
with pytest.raises(ValueError):
bonds = molecool_test.build_bond_list(coordinates, min_bond=-1)
def test_molecular_mass(methane_molecule):
symbols, coordinates = methane_molecule
calculated_mass = molecool_test.calculate_molecular_mass(symbols)
actual_mass = 16.04
assert pytest.approx(actual_mass, abs=1e-2) == calculated_mass
def test_center_of_mass(methane_molecule):
symbols, coordinates = methane_molecule
center_of_mass = molecool_test.calculate_center_of_mass(symbols, coordinates)
expected_center = np.array([1,1,1])
assert np.array_equal(center_of_mass, expected_center)
| 21.707692
| 81
| 0.697378
| 188
| 1,411
| 4.957447
| 0.276596
| 0.177039
| 0.141631
| 0.177039
| 0.391631
| 0.373391
| 0.218884
| 0.019313
| 0.019313
| 0
| 0
| 0.030009
| 0.197023
| 1,411
| 64
| 82
| 22.046875
| 0.792586
| 0
| 0
| 0.135135
| 0
| 0
| 0.003544
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 1
| 0.162162
| false
| 0
| 0.081081
| 0
| 0.27027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b7fc93c2e30ca54b02519e2a781a191d7e736a1
| 6,705
|
py
|
Python
|
pochta/tracking.py
|
john-phonk/fs-pochta-api
|
c3b7df4ecdbfc45fb482cedd8ab6c2927e0a1c9d
|
[
"MIT"
] | 16
|
2019-05-13T01:12:10.000Z
|
2022-01-17T06:21:35.000Z
|
pochta/tracking.py
|
john-phonk/fs-pochta-api
|
c3b7df4ecdbfc45fb482cedd8ab6c2927e0a1c9d
|
[
"MIT"
] | 4
|
2020-03-06T06:46:35.000Z
|
2020-11-22T04:24:34.000Z
|
pochta/tracking.py
|
john-phonk/fs-pochta-api
|
c3b7df4ecdbfc45fb482cedd8ab6c2927e0a1c9d
|
[
"MIT"
] | 6
|
2019-08-10T13:18:21.000Z
|
2021-11-25T08:57:30.000Z
|
from abc import ABC
from typing import List
from zeep import CachingClient, Client, Settings
from .exceptions import APIError
class _BaseClient(ABC):
"""API клиент сервиса отслеживания посылок.
https://tracking.pochta.ru/specification
"""
WSDL = ''
def __init__(self, login: str, password: str, caching=True):
"""Инициализация API клиента сервиса отслеживания посылок.
:param login: Логин от системы трекинга
:param password: Пароль от системы трекинга
:param caching: Флаг, позволяющий отключить кэширование в zeep
"""
self._login = login
self._password = password
zeep_client = CachingClient if caching else Client
self._client = zeep_client(
self.WSDL,
settings=Settings(strict=False),
)
class SingleTracker(_BaseClient):
"""Клиент для взаимодеействия с API единичной обработки запросов."""
WSDL = 'https://tracking.russianpost.ru/rtm34?wsdl'
def get_history(self, barcode: str) -> dict:
"""
История операций над отправлением.
Метод getOperationHistory используется для получения информации о
конкретном отправлении. Метод возвращает подробную информацию
по всем операциям, совершенным над отправлением.
https://tracking.pochta.ru/specification#getOperationHistory
:param barcode: Идентификатор регистрируемого почтового отправления в одном из форматов:
- внутрироссийский, состоящий из 14 символов (цифровой)
- международный, состоящий из 13 символов (буквенно-цифровой) в формате S10.
:return: Ответ метода getOperationHistory содержит список элементов
historyRecord. Каждый из них содержит информацию об одной операции над
отправлением. Если над отправлением еще не зарегистрировано ни одной
операции, то возвращается пустой список элементов historyRecord.
"""
return self._client.service.getOperationHistory(
OperationHistoryRequest={
'Barcode': barcode,
'MessageType': '0'
},
AuthorizationHeader={
'login': self._login,
'password': self._password,
},
)
def get_order_events_for_mail(self, barcode: str) -> dict:
"""
История операций с наложенным платежом.
Метод PostalOrderEventsForMail позволяет получить информацию об операциях с
наложенным платежом, который связан с конкретным почтовым отправлением.
https://tracking.pochta.ru/specification#PostalOrderEventsForMail
:param barcode: Идентификатор регистрируемого почтового отправления в одном из форматов:
- внутрироссийский, состоящий из 14 символов (цифровой);
- международный, состоящий из 13 символов (буквенно-цифровой) в формате S10.
:return: Список событий
"""
return self._client.service.PostalOrderEventsForMail(
PostalOrderEventsForMailInput={
'Barcode': barcode,
},
AuthorizationHeader={
'login': self._login,
'password': self._password,
},
)
class BatchTracker(_BaseClient):
"""Клиент для взаимодеействия с API пакетной обработки запросов."""
WSDL = 'https://tracking.russianpost.ru/fc?wsdl'
def get_ticket(self, barcodes: List[str]) -> str:
"""Получения билета на подготовку информации по списку идентификаторов отправлений.
Метод getTicket используется для получения билета
на подготовку информации по списку идентификаторов отправлений.
В запросе передается список идентификаторов отправлений.
При успешном вызове метод возвращает идентификатор билета.
Ограничения и рекомендации по использованию:
- Количество идентификаторов отправлений в одном запросе не должно превышать *3000*.
- Рекомендуется выполнять первое обращение за ответом по билету не ранее,
чем через 15 минут от момента выдачи билета.
- В случае неготовности результата повторные обращения по тому же билету следует
выполнять не чаще, чем 1 раз в 15 минут
- Время хранения ответа по билету в Сервисе отслеживания составляет 32 часа.
По истечении этого периода ответ удаляется.
https://tracking.pochta.ru/specification раздел "Пакетная обработка" п.3
:param barcodes: Идентификаторы регистрируемых почтовогых отправлений в одном из форматов:
- внутрироссийский, состоящий из 14 символов (цифровой)
- международный, состоящий из 13 символов (буквенно-цифровой) в формате S10.
:return: Ответ метода getTicket содержит информацию о выданном билете в объекте
ticketResponse в случае успешного запроса, функция возвращает номер созданного ticket,
полученного из ticketResponse.value
"""
# По умолчанию zeep генерирует Request старой версии,
# где запрос отправляется в виде файла с метаданными
# Поэтому, вручную создаём объект Request и убираем аттрибуты, относящиеся к файлу
request = self._client.get_type('{http://fclient.russianpost.org}file')
request.attributes.clear()
items = [{'Barcode': barcode} for barcode in barcodes]
response = self._client.service.getTicket(
request=request(Item=items),
login=self._login,
password=self._password,
language='RUS',
)
if response['error'] is not None:
raise APIError(f'Response body contains error: {response["error"]}')
return response['value']
def get_response_by_ticket(self, ticket: str) -> List[dict]:
"""Метод используется для получения информации об отправлениях по ранее полученному билету.
Вызывает метод answerByTicketRequest используемый для получения информации
об отправлениях по ранее полученному билету.
https://tracking.pochta.ru/specification раздел "Пакетная обработка" п.4
:param ticket: Строка, содержащая номер ticket, полученного ранее при вызове getTicket
:return: Результаты пакетной обработки в виде списка словарей,
содержащих результаты выполнения запроса на пакетную обработку
"""
response = self._client.service.getResponseByTicket(
ticket=ticket,
login=self._login,
password=self._password,
)
if response['error'] is not None:
raise APIError(f'Response body contains error: {response["error"]}')
return response['value']['Item']
| 40.149701
| 99
| 0.670097
| 676
| 6,705
| 6.594675
| 0.409763
| 0.020413
| 0.02131
| 0.023553
| 0.365635
| 0.358008
| 0.290265
| 0.245402
| 0.245402
| 0.16061
| 0
| 0.006885
| 0.263535
| 6,705
| 166
| 100
| 40.391566
| 0.895909
| 0.560477
| 0
| 0.262295
| 0
| 0
| 0.122159
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081967
| false
| 0.098361
| 0.065574
| 0
| 0.311475
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
4b91ba97fda9b2ee93796afb30a9ecc697c21159
| 1,205
|
py
|
Python
|
script.module.placenta/lib/resources/lib/modules/thexem.py
|
parser4life/tantrumrepo
|
3b37145f4772409e538cbddb0b7aa23be525772a
|
[
"Beerware"
] | 1
|
2021-05-09T19:55:51.000Z
|
2021-05-09T19:55:51.000Z
|
script.module.placenta/lib/resources/lib/modules/thexem.py
|
parser4life/tantrumrepo
|
3b37145f4772409e538cbddb0b7aa23be525772a
|
[
"Beerware"
] | null | null | null |
script.module.placenta/lib/resources/lib/modules/thexem.py
|
parser4life/tantrumrepo
|
3b37145f4772409e538cbddb0b7aa23be525772a
|
[
"Beerware"
] | 2
|
2020-04-01T22:11:12.000Z
|
2020-05-07T23:54:52.000Z
|
# -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: MuadDib
import json
from resources.lib.modules import client
URL_PATTERN = 'http://thexem.de/map/single?id=%s&origin=tvdb&season=%s&episode=%s&destination=scene'
def get_scene_episode_number(tvdbid, season, episode):
try:
url = URL_PATTERN % (tvdbid, season, episode)
r = client.request(url)
r = json.loads(r)
if r['result'] == 'success':
data = r['data']['scene']
return data['season'], data['episode']
except:
pass
return season, episode
| 36.515152
| 100
| 0.480498
| 129
| 1,205
| 4.449612
| 0.643411
| 0.067944
| 0.066202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003052
| 0.184232
| 1,205
| 32
| 101
| 37.65625
| 0.580875
| 0.418257
| 0
| 0
| 0
| 0.071429
| 0.217153
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.071429
| 0.142857
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
4b9490ebcc233667c0f331f949a3dfce27be8b1f
| 8,723
|
py
|
Python
|
hirebob/portal/forms.py
|
shantanub0/hirebob
|
5a55e97c6e220059964fbb55439b0189abae1307
|
[
"MIT"
] | null | null | null |
hirebob/portal/forms.py
|
shantanub0/hirebob
|
5a55e97c6e220059964fbb55439b0189abae1307
|
[
"MIT"
] | 1
|
2018-06-23T01:20:26.000Z
|
2018-06-25T21:49:17.000Z
|
hirebob/portal/forms.py
|
shantanub0/hirebob
|
5a55e97c6e220059964fbb55439b0189abae1307
|
[
"MIT"
] | 1
|
2018-06-14T12:11:59.000Z
|
2018-06-14T12:11:59.000Z
|
from django import forms
from .models import UserAccount, JobPost, JobPostActivity, UserProfile
class FormUserCreation(forms.ModelForm):
UserTypes = ((1, 'Applicants'), (2, 'Organisations'))
user_type = forms.ChoiceField(choices=UserTypes,
widget=forms.Select(attrs={'class': "form-control"}))
user_full_name = forms.CharField(max_length=100,
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Full Name'}))
email = forms.EmailField(max_length=250,
help_text="Required. Invalid format",
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Email ID'}))
password = forms.CharField(widget=forms.TextInput(attrs={'class': "form-control",
'type': 'password',
'placeholder': 'Enter Password',
'minlength': '6',
'onkeyup': 'check();'}))
confirm_password = forms.CharField(widget=forms.TextInput(attrs={'class': "form-control",
'type': 'password',
'placeholder': 'Re-enter Password',
'minlength': '6',
'onkeyup': 'check();'}))
class Meta:
model = UserAccount
fields = ('user_type', 'user_full_name', 'email', 'password')
class FormLogin(forms.ModelForm):
email = forms.EmailField(max_length=250,
help_text="Required. Invalid format",
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Email ID'}))
password = forms.CharField(widget=forms.TextInput(attrs={'class': "form-control",
'type': 'password',
'placeholder': 'Enter Password',
'minlength': '6'}))
class Meta:
model = UserAccount
fields = ('email', 'password')
class FormJobPost(forms.ModelForm):
Locations = (('Mumbai', 'Mumbai'), ('Navi Mumbai', 'Navi Mumbai'), ('Pune', 'Pune'))
job_types = (('Software Engineer', 'Software Engineer'), ('Database Admin', 'Database Admin'), ('DevOps', 'DevOps'))
jobs_skill = (('Java', 'Java'), ('Python', 'Python'), ('C', 'C'), ('C++', 'C++'))
job_location = forms.ChoiceField(choices=Locations,
widget=forms.Select(attrs={'class': "form-control"}))
job_type = forms.ChoiceField(choices=job_types,
widget=forms.Select(attrs={'class': "form-control"}))
job_skills = forms.ChoiceField(choices=jobs_skill,
widget=forms.Select(attrs={'class': "form-control"}))
job_title = forms.CharField(max_length=100,
required=True,
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter job title'}))
posted_by_email = forms.EmailField(max_length=250,
help_text="Required. Invalid format",
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Email ID',
'readonly': True}))
job_description = forms.CharField(widget=forms.Textarea(attrs={'class': "form-control",
'placeholder': 'Enter Job Description'}))
class Meta:
model = JobPost
fields = ('job_type', 'job_skills', 'job_location', 'posted_by_email', 'job_description', 'job_title')
class FormApply(forms.ModelForm):
email = forms.EmailField(required=True,
max_length=250,
help_text="Required. Invalid format",
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Email ID',
'readonly': True}))
to_email = forms.EmailField(required=True,
max_length=250,
help_text="Required. Invalid format",
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Email ID',
'readonly': True}))
cover_letter = forms.CharField(required=True,
widget=forms.Textarea(attrs={'class': "form-control",
'placeholder': 'Cover Letter'}))
post_id = forms.IntegerField(required=True,
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Post ID',
'readonly': True}))
job_title = forms.CharField(required=True,
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Job Title'}))
class Meta:
model = JobPostActivity
fields = ('email', 'post_id')
class FormUploadImage(forms.Form):
user_image = forms.ImageField(widget=forms.FileInput())
class Meta:
model = UserAccount
fields = ('user_image', )
class FormUploadResume(forms.Form):
resume = forms.FileField()
class Meta:
model = UserAccount
fields = ('resume', )
class FormApplicantsInfo(forms.Form):
Gender = (('Male', 'Male'), ('Female', 'Female'), ('None', 'None'))
gender = forms.ChoiceField(choices=Gender,
widget=forms.Select(attrs={'class': "form-control"}))
email = forms.EmailField(max_length=250,
help_text="Required. Invalid format",
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Email ID',
'readonly': True}))
gmail = forms.EmailField(max_length=250,
help_text="Required. Invalid format",
widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter gmail id'}))
linkedin = forms.CharField(widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Linkedin profile'}))
skype_id = forms.CharField(widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter skype id'}))
about_me = forms.CharField(widget=forms.Textarea(attrs={'class': "form-control", 'placeholder': 'Enter About you'}))
address = forms.CharField(widget=forms.Textarea(attrs={'class': "form-control", 'placeholder': 'Enter your address'}))
birthday = forms.DateField(widget=forms.TextInput(attrs={'class': "form-control", 'placeholder': 'Enter DOB in DD-MM-YYYY'}))
job_title = forms.CharField(widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Job Title'}))
location = forms.CharField(widget=forms.TextInput(attrs={'class': "form-control",
'placeholder': 'Enter Your location'}))
class Meta:
model = UserProfile
fields = ('email', 'gmail', 'linkedin', 'skype_id', 'about_me', 'address', 'birthday', 'job_title',
'location', 'gender')
| 56.642857
| 129
| 0.458558
| 669
| 8,723
| 5.898356
| 0.171898
| 0.080841
| 0.099341
| 0.149012
| 0.667258
| 0.631272
| 0.591485
| 0.569437
| 0.525342
| 0.496452
| 0
| 0.006339
| 0.4213
| 8,723
| 153
| 130
| 57.013072
| 0.775357
| 0
| 0
| 0.5
| 0
| 0
| 0.206351
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.087302
| 0.015873
| 0
| 0.404762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
4b9af91c0efeb81facf6d27474553a4bb9a6505d
| 2,025
|
py
|
Python
|
tests/unit_tests/tasks/fortran/test_fortran_compiler.py
|
bblay/fab
|
bbdac7bae20c5b8695a2d56945c9593b4fda9c74
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit_tests/tasks/fortran/test_fortran_compiler.py
|
bblay/fab
|
bbdac7bae20c5b8695a2d56945c9593b4fda9c74
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit_tests/tasks/fortran/test_fortran_compiler.py
|
bblay/fab
|
bbdac7bae20c5b8695a2d56945c9593b4fda9c74
|
[
"BSD-3-Clause"
] | null | null | null |
from pathlib import Path
from unittest import mock
import pytest
from fab.build_config import AddFlags
from fab.dep_tree import AnalysedFile
from fab.steps.compile_fortran import CompileFortran
# todo: we might have liked to reuse this from test_dep_tree
from fab.util import CompiledFile
@pytest.fixture
def src_tree():
return {
Path('src/foo.f90'): AnalysedFile(fpath=Path('src/foo.f90'), file_hash=None),
Path('src/root.f90'): AnalysedFile(
fpath=Path('src/root.f90'), file_deps={Path('src/a.f90'), Path('src/b.f90')}, file_hash=None),
Path('src/a.f90'): AnalysedFile(fpath=Path('src/a.f90'), file_deps={Path('src/c.f90')}, file_hash=None),
Path('src/b.f90'): AnalysedFile(fpath=Path('src/b.f90'), file_deps={Path('src/c.f90')}, file_hash=None),
Path('src/c.f90'): AnalysedFile(fpath=Path('src/c.f90'), file_deps=set(), file_hash=None),
}
class Test_run(object):
# todo: almost identical to the c compiler test
def test_vanilla(self, src_tree):
# ensure the compile passes match the build tree
config = mock.Mock(workspace=Path('foo/src'), multiprocessing=False)
c_compiler = CompileFortran(
compiler='gcc', common_flags=['-c'], path_flags=[AddFlags(match='foo/src/*', flags=['-Dhello'])])
def foo(items, func):
return [CompiledFile(af, output_fpath=None) for af in items]
with mock.patch('fab.steps.Step.run_mp', side_effect=foo) as mock_run_mp:
c_compiler.run(artefact_store={'build_tree': src_tree}, config=config)
# 1st pass
mock_run_mp.assert_any_call(
items={src_tree[Path('src/foo.f90')], src_tree[Path('src/c.f90')]}, func=mock.ANY)
# 2nd pass
mock_run_mp.assert_any_call(
items={src_tree[Path('src/a.f90')], src_tree[Path('src/b.f90')]}, func=mock.ANY)
# last pass
mock_run_mp.assert_called_with(items={src_tree[Path('src/root.f90')]}, func=mock.ANY)
| 38.942308
| 112
| 0.651852
| 299
| 2,025
| 4.257525
| 0.29097
| 0.104478
| 0.078555
| 0.094266
| 0.35978
| 0.169678
| 0.135114
| 0.135114
| 0.135114
| 0.135114
| 0
| 0.02457
| 0.196049
| 2,025
| 51
| 113
| 39.705882
| 0.757371
| 0.088395
| 0
| 0.064516
| 0
| 0
| 0.133225
| 0.011419
| 0
| 0
| 0
| 0.019608
| 0.096774
| 1
| 0.096774
| false
| 0
| 0.225806
| 0.064516
| 0.419355
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4ba4c531fc5b73ca047fb0191f3bbb5ca13cf62d
| 209
|
py
|
Python
|
udacity/cloud-native-application-architecture/3-message-passing/lesson-3-implementing-message-passing/kafka-python-demo/producer.py
|
thomasrobertz/mooc
|
cb87365bfcbe8ccf972f36d70a251c73b3c15a7b
|
[
"MIT"
] | null | null | null |
udacity/cloud-native-application-architecture/3-message-passing/lesson-3-implementing-message-passing/kafka-python-demo/producer.py
|
thomasrobertz/mooc
|
cb87365bfcbe8ccf972f36d70a251c73b3c15a7b
|
[
"MIT"
] | 13
|
2021-12-14T20:59:34.000Z
|
2022-03-02T11:09:34.000Z
|
udacity/cloud-native-application-architecture/3-message-passing/lesson-3-implementing-message-passing/kafka-python-demo/producer.py
|
thomasrobertz/mooc
|
cb87365bfcbe8ccf972f36d70a251c73b3c15a7b
|
[
"MIT"
] | 1
|
2020-08-20T12:53:43.000Z
|
2020-08-20T12:53:43.000Z
|
from kafka import KafkaProducer
TOPIC_NAME = 'items'
KAFKA_SERVER = 'localhost:9092'
producer = KafkaProducer(bootstrap_servers=KAFKA_SERVER)
producer.send(TOPIC_NAME, b'Test Message!!!')
producer.flush()
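# --- Illustrative consumer sketch, not part of the original producer.py ---
# A minimal kafka-python consumer for the same broker and topic, added here
# only to clarify the message-passing demo; it reuses the constants above and
# stops iterating after 5 seconds without new messages.
from kafka import KafkaConsumer

consumer = KafkaConsumer(TOPIC_NAME,
                         bootstrap_servers=KAFKA_SERVER,
                         auto_offset_reset='earliest',
                         consumer_timeout_ms=5000)
for message in consumer:
    print(message.value)  # e.g. b'Test Message!!!'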
| 19
| 56
| 0.789474
| 26
| 209
| 6.153846
| 0.692308
| 0.1125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.100478
| 209
| 10
| 57
| 20.9
| 0.829787
| 0
| 0
| 0
| 0
| 0
| 0.162679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
2991579a0641f47ea260ec96e0a53c12f4df3dbf
| 342
|
py
|
Python
|
authors/apps/author_follows/urls.py
|
andela/ah-backend-dojo
|
f2b14f15c4af906da846cafe722f13868d58371f
|
[
"BSD-3-Clause"
] | 3
|
2019-05-01T10:41:09.000Z
|
2021-04-25T22:17:20.000Z
|
authors/apps/author_follows/urls.py
|
andela/ah-backend-dojo
|
f2b14f15c4af906da846cafe722f13868d58371f
|
[
"BSD-3-Clause"
] | 24
|
2019-04-23T14:56:21.000Z
|
2021-12-13T19:58:37.000Z
|
authors/apps/author_follows/urls.py
|
andela/ah-backend-dojo
|
f2b14f15c4af906da846cafe722f13868d58371f
|
[
"BSD-3-Clause"
] | 4
|
2019-06-29T10:40:32.000Z
|
2022-01-04T11:44:53.000Z
|
from django.urls import path
from .views import FollowStatsViews, AuthorFollowViews
urlpatterns = [
# /authors/followers/ or ../following/
path("<str:follow_state>/", FollowStatsViews.as_view(), name="follows"),
# /authors/<author_username>/follow
path("<str:username>/follow/", AuthorFollowViews.as_view(), name="follow")
]
| 34.2
| 78
| 0.719298
| 37
| 342
| 6.540541
| 0.594595
| 0.057851
| 0.082645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 342
| 10
| 79
| 34.2
| 0.806667
| 0.204678
| 0
| 0
| 0
| 0
| 0.2
| 0.081481
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
29978909888062a7973e1bdbe5b82311fd8d9b27
| 6,173
|
py
|
Python
|
main.py
|
ml4design/text-processing-module
|
f1bfe1a49d58156e9e48e5ef69b980f89a5981ea
|
[
"MIT"
] | null | null | null |
main.py
|
ml4design/text-processing-module
|
f1bfe1a49d58156e9e48e5ef69b980f89a5981ea
|
[
"MIT"
] | null | null | null |
main.py
|
ml4design/text-processing-module
|
f1bfe1a49d58156e9e48e5ef69b980f89a5981ea
|
[
"MIT"
] | null | null | null |
import pandas as pd
from preprocessing import preprocess
from wordclouds import wordcloud, find_by_word
from sentiment_analysis import calculate_sentiment, find_by_sentiment
import nltk
import os
import tempfile
from topic_modelling import lda_topic_model, show_topics, show_example_sentences_by_topic
os.environ["MPLCONFIGDIR"] = tempfile.gettempdir()
nltk.download('punkt')
nltk.download('stopwords')
pd.set_option('display.max_columns', None)
#####################################################
# READING THE DATA #
#####################################################
# In this tutorial we will mostly deal with comma-separated values (CSV) files (similar in structure to Excel files). Each line of the file is a data record. Each record consists of one or more fields, separated by commas. Check here for more information https://en.wikipedia.org/wiki/Comma-separated_values
# reads the file named "students_eng.csv".
# If you want to read a different file you need to (1) upload it to Replit and (2) change "students_eng.csv" to the name of the newly uploaded file. Here we use the Pandas library ("pd") to read our file and in return we get a Pandas DataFrame. For faster processing and experimentation you can also select different subsets of the file's content through the nrows parameter -> number of lines to read.
students_data = pd.read_csv("data/students_eng.csv")
# With the next line you can print the data you just read and see what a Pandas DataFrame looks like (quite similar to Excel)
print(students_data.head(3))
# As you can see the data is separated into columns. Let's see how we can get the data from a specific column. The following line allows us to get only the data inside the column named "student_needs". Other options are: study_programme, degree_programme, planned_grad_year, today_feeling, physical_health, student_needs, students_sugg_to_improve_wellbeing
students_data = students_data['student_needs']
#################################################
# TEXT PREPROCESSING #
#################################################
# Here we will pre-process our entire text collection.
# First, we need to merge all the different lines of the "comments" into one big corpus, so that we can later analyze it.
corpus = students_data.to_list()
print(corpus[0:5])
# Then we need to "preprocess" our text. To do so we use the following line of code (more details on what happens under the hood can be found in the "preprocessing.py" file - feel free to take a look at it).
# The following code makes all words lowercase, creates word tokens, removes stopwords, punctuation, and digits, and reduces inflected words to their word stem (stemming). Feel free to experiment by turning any of the following values from True to False. In addition, you can add extra words which you do not want to include in your analysis by adding them within the extra_stopwords brackets, e.g. extra_stopwords=["people"] would remove the word people from everywhere in the document. Hint: don't forget to use the quotes!
# tokens = [preprocess(sentence, lower=True, rem_punc=True, word_tokenization=True, rem_numb=True, rem_stopwords=True, stem=True, extra_stopwords = []) for sentence in students_data.to_list()]
# print(tokens)
#############################################
# WORD FREQUENCIES #
#############################################
# Word frequency calculation is the most basic tool in text processing, yet it gives a comprehensive picture of the content in your text collection. One of the most common ways to visualize word frequencies is a word cloud (which you've already seen if you opened Voyant)
# This function needs three things from you:
# 1. tokens -- the result of our preprocessing step
# 2. the name of the picture it will generate and save to your directory
# 3. Number of words to show
# wordcloud(words = tokens, name_of_output = 'wordcloud', num = 10)
# Text processing often requires working with examples, because words are often contextual and it is difficult to understand what is happening in your text collection. For this purpose, you can find documents by pieces of texts.
# This function needs two things from you:
# 1. tokens -- the result of our preprocessing step (it will look for examples in this collection)
# 2. a word or a phrase the text should include
# test = find_by_word(tokens, 'studi')
#print(test)
#############################################
# Sentiment analysis #
#############################################
# The aim of sentiment analysis is to calculate how emotional your texts are and what the valence of these texts is. In our example we use VADER (Valence Aware Dictionary and sEntiment Reasoner), but you can find various other sentiment analysis tools on the internet.
# VADER calculates how positive, neutral, and negative a text is. It also calculates a compound score which considers all three metrics to give you a precise measurement of the sentiment.
# This function requires only the preprocessed collection of texts
# sent_result = calculate_sentiment(tokens)
# print(sent_result)
# Now that the sentiment scores are calculated, you can find the most interesting texts by looking at the documents with the highest scores (in this example, we look at the 5 most positive documents).
# This function requires three things:
# 1. The result of sentiment calculation
# 2. What score you're interested in
# 3. Number of examples you want to get
# res = find_by_sentiment(df_with_scores = sent_result, score_type = 'pos', num_of_examples = 5)
# print(res)
#############################################
# TOPIC MODELING #
#############################################
# num_of_topics = 4
# word_num_per_topic = 5
# lda_model = lda_topic_model(tokens, topic_num=num_of_topics)
# show_topics(lda_model, word_num_per_topic )
# Check examples assigned to a particular topic ####
# num_of_examples = 5
# show_example_sentences_by_topic(corpus, tokens, lda_model, word_num_per_topic,topic_to_check=1, num_of_examp_to_show = num_of_examples)
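# --- Illustrative sketch, not part of the original main.py ---
# The helper modules used above (preprocessing.py, wordclouds.py, ...) are not
# shown in this file. Purely as an illustration, a bare-bones word-frequency
# count over a list of already-tokenised documents could look like the sketch
# below; the function name and arguments are hypothetical.
from collections import Counter

def top_words(tokens_per_doc, num=10):
    # Flatten the per-document token lists and count occurrences of each token.
    counts = Counter(token for doc in tokens_per_doc for token in doc)
    return counts.most_common(num)

# Example: top_words([['studi', 'time'], ['time', 'sleep']], num=2) -> [('time', 2), ('studi', 1)]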
| 55.116071
| 524
| 0.706302
| 907
| 6,173
| 4.699008
| 0.363837
| 0.009385
| 0.009855
| 0.010558
| 0.067574
| 0.044111
| 0.033318
| 0.033318
| 0.033318
| 0.033318
| 0
| 0.004071
| 0.164426
| 6,173
| 112
| 525
| 55.116071
| 0.822218
| 0.794751
| 0
| 0
| 0
| 0
| 0.109267
| 0.029046
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.470588
| 0
| 0.470588
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
29ac9c03bbaa51b34d7d739bc8607fc9dd0af610
| 309
|
py
|
Python
|
main.py
|
yaojenkuo/stockflow
|
946609c2fcc1d602032672b57ae7119b4cadae8d
|
[
"MIT"
] | 33
|
2015-03-08T00:43:37.000Z
|
2021-02-18T23:40:05.000Z
|
main.py
|
Asoul/stockflow
|
946609c2fcc1d602032672b57ae7119b4cadae8d
|
[
"MIT"
] | null | null | null |
main.py
|
Asoul/stockflow
|
946609c2fcc1d602032672b57ae7119b4cadae8d
|
[
"MIT"
] | 25
|
2015-03-07T15:57:23.000Z
|
2021-07-05T01:32:32.000Z
|
#!/bin/python
# -*- coding: utf-8 -*-
'''Basic example format'''
import sys
from ctrls.Tester import Tester
from models.exampleModel import exampleModel
def main():
numbers = ['1314']  # stock codes
tester = Tester(numbers, exampleModel)  # use the Tester component
tester.run()  # run the simulation
if __name__ == '__main__':
sys.exit(main())
| 17.166667
| 50
| 0.647249
| 37
| 309
| 5.189189
| 0.648649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02008
| 0.194175
| 309
| 17
| 51
| 18.176471
| 0.751004
| 0.18123
| 0
| 0
| 0
| 0
| 0.049383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.333333
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
29b5b93fcc93149c869189a925d3bab4277eed76
| 748
|
py
|
Python
|
googledevices/cli/commands/info.py
|
vlebourl/googledevices
|
5d8604ad48d94170127d1da9f01106a4d3bc4845
|
[
"MIT"
] | 19
|
2018-11-24T03:09:59.000Z
|
2021-02-11T09:20:11.000Z
|
googledevices/cli/commands/info.py
|
vlebourl/googledevices
|
5d8604ad48d94170127d1da9f01106a4d3bc4845
|
[
"MIT"
] | 13
|
2018-11-24T13:16:38.000Z
|
2022-02-22T17:27:08.000Z
|
googledevices/cli/commands/info.py
|
vlebourl/googledevices
|
5d8604ad48d94170127d1da9f01106a4d3bc4845
|
[
"MIT"
] | 4
|
2018-11-26T16:14:42.000Z
|
2021-10-20T14:20:40.000Z
|
"""Get information about this package."""
def info(system):
"""Get information about this package."""
import googledevices.utils.const as package
print("Projectname: ", package.NAME)
print("Version: ", package.VERSION)
print("GitHub link: ", package.URLS.get("github"))
print("PyPi link: ", package.URLS.get("pypi"))
print("Maintainers:")
for maintainer in package.MAINTAINERS:
print(" ", maintainer.get("name"), "(", maintainer.get("github"), ")")
print("")
if system:
import platform
print("")
print("System: ", platform.system())
print("Version: ", platform.version())
print("Python version: ", platform.python_version())
| 32.521739
| 81
| 0.593583
| 75
| 748
| 5.906667
| 0.373333
| 0.063205
| 0.085779
| 0.103837
| 0.13544
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240642
| 748
| 22
| 82
| 34
| 0.77993
| 0.09492
| 0
| 0.125
| 0
| 0
| 0.217718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.125
| 0
| 0.1875
| 0.6875
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
29c4a45e5143815cb47c3724fcaecb30960fac72
| 475
|
py
|
Python
|
src/kotify/fabric/procfile.py
|
kotify/kotify.fabric
|
5ce50a38210217f643c81438b53466b60fc16cb1
|
[
"MIT"
] | null | null | null |
src/kotify/fabric/procfile.py
|
kotify/kotify.fabric
|
5ce50a38210217f643c81438b53466b60fc16cb1
|
[
"MIT"
] | null | null | null |
src/kotify/fabric/procfile.py
|
kotify/kotify.fabric
|
5ce50a38210217f643c81438b53466b60fc16cb1
|
[
"MIT"
] | null | null | null |
from ._core import Collection, local, task
@task(name="main", default=True)
def start_main(c):
local(f"overmind start -l {','.join(c.start.main + c.start.minimal)}", pty=True)
@task(name="minimal")
def start_minimal(c):
local(f"overmind start -l {','.join(c.start.minimal)}", pty=True)
@task(name="all")
def start_all(c):
local("overmind start", pty=True)
ns = Collection("start")
ns.add_task(start_all)
ns.add_task(start_main)
ns.add_task(start_minimal)
| 20.652174
| 84
| 0.692632
| 77
| 475
| 4.142857
| 0.285714
| 0.15047
| 0.08464
| 0.131661
| 0.351097
| 0.351097
| 0.351097
| 0.194357
| 0.194357
| 0
| 0
| 0
| 0.12
| 475
| 22
| 85
| 21.590909
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0.290526
| 0.103158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.071429
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29c698fcf98da3c177cd1347dd70acef351370fb
| 888
|
py
|
Python
|
backend/src/feature_extraction/rolloff.py
|
AnXi-TieGuanYin-Tea/MusicGenreClassifiaction
|
a0b9f621b0a5d2451180b12af7681756c5abd138
|
[
"MIT"
] | 7
|
2018-05-01T19:39:17.000Z
|
2020-01-02T17:11:05.000Z
|
backend/src/feature_extraction/rolloff.py
|
AnXi-TieGuanYin-Tea/MusicGenreClassifiaction
|
a0b9f621b0a5d2451180b12af7681756c5abd138
|
[
"MIT"
] | 10
|
2018-12-10T22:16:43.000Z
|
2020-08-27T18:23:45.000Z
|
backend/src/feature_extraction/rolloff.py
|
AnXi-TieGuanYin-Tea/MusicGenreClassifiaction
|
a0b9f621b0a5d2451180b12af7681756c5abd138
|
[
"MIT"
] | 2
|
2021-04-16T08:20:17.000Z
|
2022-01-06T14:06:44.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 17 23:14:28 2018
@author: Akihiro Inui
"""
def rolloff(input_power_spectrum: list, param: float=0.85) -> float:
"""
Spectral Rolloff
:param input_power_spectrum: power spectrum in list
:param param: threshold for rolloff (fraction of total energy)
:return: Spectral Rolloff
"""
assert 0 < param < 1, "parameter must be between 0 and 1"
# Initialize energy and FFT number
energy = 0
count = 0
# Calculate total energy
total_energy = sum(x ** 2 for x in input_power_spectrum)
# Find Count which has energy below param*total_energy
while energy <= param*total_energy and count < len(input_power_spectrum):
energy = pow(input_power_spectrum[count], 2) + energy
count += 1
# Normalise Spectral Rolloff
return count/len(input_power_spectrum)
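# --- Illustrative usage, not part of the original module ---
# With a flat power spectrum of ten equal bins, 85% of the total energy is
# reached after nine bins, so the rolloff point is 9/10 = 0.9.
if __name__ == "__main__":
    flat_spectrum = [1.0] * 10
    print(rolloff(flat_spectrum, param=0.85))  # -> 0.9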
| 28.645161
| 83
| 0.667793
| 122
| 888
| 4.737705
| 0.516393
| 0.157439
| 0.186851
| 0.062284
| 0.089965
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038179
| 0.233108
| 888
| 30
| 84
| 29.6
| 0.810573
| 0.420045
| 0
| 0
| 0
| 0
| 0.069474
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29c8dfdb3c65c5e9847d8ee2d3b8fe9a5f54498a
| 1,000
|
py
|
Python
|
ssh.py
|
telkomdev/keris
|
8451f3d69df174e33003e90e4fd70f602412412a
|
[
"MIT"
] | 1
|
2020-02-11T16:10:11.000Z
|
2020-02-11T16:10:11.000Z
|
ssh.py
|
telkomdev/keris
|
8451f3d69df174e33003e90e4fd70f602412412a
|
[
"MIT"
] | null | null | null |
ssh.py
|
telkomdev/keris
|
8451f3d69df174e33003e90e4fd70f602412412a
|
[
"MIT"
] | null | null | null |
from common import is_connection_ok
import paramiko
"""
execute_ssh(host, username, password, cmd, port='22')
"""
def execute_ssh(host, username, password, cmd, port='22'):
if is_connection_ok():
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=host, port=int(port), username=username, password=password)
_, stdout, stderr = ssh.exec_command(cmd, timeout=5)
res = stdout.read().decode()
error = stderr.read().decode('utf-8')
if error:
print(error)
return 'SSH_CONNECTION_FAIL'
else:
ssh.close()
return 'SSH_CONNECTION_SUCCESS with username : {username} and password {password}'.format(username=username, password=password)
except Exception:
print('*')
return 'SSH_CONNECTION_FAIL'
else:
return 'CONNECTION_NOT_FOUND'
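# --- Illustrative call, not part of the original ssh.py ---
# The host and credentials below are placeholders; a real invocation would
# look roughly like this (port defaults to '22'):
# result = execute_ssh('192.0.2.10', 'admin', 'secret', 'uname -a')
# print(result)  # e.g. 'SSH_CONNECTION_SUCCESS with username : admin and password secret'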
| 35.714286
| 143
| 0.601
| 107
| 1,000
| 5.429907
| 0.485981
| 0.110155
| 0.098107
| 0.110155
| 0.092943
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005658
| 0.293
| 1,000
| 28
| 144
| 35.714286
| 0.816124
| 0
| 0
| 0.181818
| 0
| 0
| 0.14709
| 0.02328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0.136364
| 0.090909
| 0
| 0.318182
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29ca0af350d167975f57568f8d8d244098802dd2
| 376
|
py
|
Python
|
novel/spider/config.py
|
rrcgat/novel-info
|
fcda24f9f6da5a4755e942a520045b7b5a53bef4
|
[
"MIT"
] | 4
|
2019-04-02T09:44:18.000Z
|
2020-04-15T11:47:49.000Z
|
novel/spider/config.py
|
rrcgat/novel-info
|
fcda24f9f6da5a4755e942a520045b7b5a53bef4
|
[
"MIT"
] | 1
|
2019-03-04T17:20:39.000Z
|
2019-03-04T17:48:18.000Z
|
novel/spider/config.py
|
rrcgat/novel-info
|
fcda24f9f6da5a4755e942a520045b7b5a53bef4
|
[
"MIT"
] | 1
|
2020-04-15T11:47:50.000Z
|
2020-04-15T11:47:50.000Z
|
'''
Request headers
'''
HEADERS_IPHONE = {'user-agent': (
'Mozilla/5.0 '
'(iPhone; CPU iPhone OS 6_0 like Mac OS X) '
'AppleWebKit/536.26 (KHTML, like Gecko) '
'Version/6.0 Mobile/10A5376e Safari/8536.25'
)}
HEADERS_CHROME = {'user-agent': (
'Mozilla/5.0 (X11; Linux x86_64) '
'AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/67.0.3396.99 Safari/537.36'
)}
| 22.117647
| 48
| 0.617021
| 57
| 376
| 4
| 0.614035
| 0.078947
| 0.140351
| 0.149123
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166113
| 0.199468
| 376
| 16
| 49
| 23.5
| 0.591362
| 0.007979
| 0
| 0.181818
| 0
| 0
| 0.709589
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29dd6423703e7bd3d65394220ac73d337651b108
| 1,603
|
py
|
Python
|
src/spinnaker_ros_lsm/venv/lib/python2.7/site-packages/spinnman/messages/scp/impl/scp_version_request.py
|
Roboy/LSM_SpiNNaker_MyoArm
|
04fa1eaf78778edea3ba3afa4c527d20c491718e
|
[
"BSD-3-Clause"
] | 2
|
2020-11-01T13:22:11.000Z
|
2020-11-01T13:22:20.000Z
|
src/spinnaker_ros_lsm/venv/lib/python2.7/site-packages/spinnman/messages/scp/impl/scp_version_request.py
|
Roboy/LSM_SpiNNaker_MyoArm
|
04fa1eaf78778edea3ba3afa4c527d20c491718e
|
[
"BSD-3-Clause"
] | null | null | null |
src/spinnaker_ros_lsm/venv/lib/python2.7/site-packages/spinnman/messages/scp/impl/scp_version_request.py
|
Roboy/LSM_SpiNNaker_MyoArm
|
04fa1eaf78778edea3ba3afa4c527d20c491718e
|
[
"BSD-3-Clause"
] | null | null | null |
from spinnman.messages.scp.abstract_messages.abstract_scp_request\
import AbstractSCPRequest
from spinnman.messages.sdp.sdp_flag import SDPFlag
from spinnman.messages.sdp.sdp_header import SDPHeader
from spinnman.messages.scp.scp_request_header import SCPRequestHeader
from spinnman.messages.scp.scp_command import SCPCommand
from spinnman.messages.scp.impl.scp_version_response import SCPVersionResponse
class SCPVersionRequest(AbstractSCPRequest):
""" An SCP request to read the version of software running on a core
"""
def __init__(self, x, y, p):
"""
:param x: The x-coordinate of the chip to read from, between 0 and 255
:type x: int
:param y: The y-coordinate of the chip to read from, between 0 and 255
:type y: int
:param p: The id of the processor to read the version from,\
between 0 and 31
:type p: int
:raise spinnman.exceptions.SpinnmanInvalidParameterException:
* If the chip coordinates are out of range
* If the processor is out of range
"""
super(SCPVersionRequest, self).__init__(
SDPHeader(
flags=SDPFlag.REPLY_EXPECTED, destination_port=0,
destination_cpu=p, destination_chip_x=x,
destination_chip_y=y),
SCPRequestHeader(command=SCPCommand.CMD_VER))
def get_scp_response(self):
""" See\
:py:meth:`spinnman.messages.scp.abstract_scp_request.AbstractSCPRequest.get_scp_response`
"""
return SCPVersionResponse()
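# --- Illustrative construction, not part of the original module ---
# Building a version request for processor 1 on chip (0, 0); the response
# object comes from get_scp_response() as defined above.
# request = SCPVersionRequest(x=0, y=0, p=1)
# response = request.get_scp_response()  # SCPVersionResponse instance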
| 40.075
| 101
| 0.674984
| 198
| 1,603
| 5.30303
| 0.368687
| 0.106667
| 0.114286
| 0.087619
| 0.188571
| 0.089524
| 0.089524
| 0.089524
| 0.089524
| 0.089524
| 0
| 0.010059
| 0.25577
| 1,603
| 39
| 102
| 41.102564
| 0.870075
| 0.383656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.352941
| 0
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
29e3af095a46b5abdfb783f45e3fb0c6a6c5b81f
| 652
|
py
|
Python
|
LC/201.py
|
szhu3210/LeetCode_Solutions
|
64747eb172c2ecb3c889830246f3282669516e10
|
[
"MIT"
] | 2
|
2018-02-24T17:20:02.000Z
|
2018-02-24T17:25:43.000Z
|
LC/201.py
|
szhu3210/LeetCode_Solutions
|
64747eb172c2ecb3c889830246f3282669516e10
|
[
"MIT"
] | null | null | null |
LC/201.py
|
szhu3210/LeetCode_Solutions
|
64747eb172c2ecb3c889830246f3282669516e10
|
[
"MIT"
] | null | null | null |
class Solution(object):
def rangeBitwiseAnd(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
"""
## my solution
# res=''
# for i in xrange(len(bin(m))-2):
# if m>>i & 1 == 0:
# res='0'+res
# elif (((m>>i) + 1) << i) <= n:
# res='0'+res
# else:
# res='1'+res
# return int(res,2)
## quick solution
c=0
for i in xrange(len(bin(m))-2):
if m>>i != n>>i:
c+=1
else:
break
return m>>c<<c
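# --- Illustrative check, not part of the original solution ---
# The code above uses xrange, so it assumes Python 2. Bitwise AND over the
# range [5, 7] keeps only the shared binary prefix of 0b101..0b111, which is 0b100.
# print(Solution().rangeBitwiseAnd(5, 7))  # -> 4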
| 24.148148
| 44
| 0.328221
| 77
| 652
| 2.779221
| 0.376623
| 0.028037
| 0.056075
| 0.11215
| 0.224299
| 0.224299
| 0.224299
| 0.224299
| 0.224299
| 0.224299
| 0
| 0.0347
| 0.513804
| 652
| 27
| 45
| 24.148148
| 0.640379
| 0.381902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29e805265bd23dadb56a588aaeba28a86de79226
| 4,250
|
py
|
Python
|
src/test/resources/scripts/Authentication.py
|
tomjbarry/Penstro
|
d9179852158bebf48aaba7a198de5246acb1b064
|
[
"MIT"
] | 1
|
2019-02-25T05:55:34.000Z
|
2019-02-25T05:55:34.000Z
|
src/test/resources/scripts/Authentication.py
|
tomjbarry/penstro
|
d9179852158bebf48aaba7a198de5246acb1b064
|
[
"MIT"
] | null | null | null |
src/test/resources/scripts/Authentication.py
|
tomjbarry/penstro
|
d9179852158bebf48aaba7a198de5246acb1b064
|
[
"MIT"
] | null | null | null |
from PyConstants import Paths
from PyConstants import Codes
from PyConstants import CacheTimes
from PyBaseTest import BaseTest
from PyRequest import PyRequest
import time
class Authentication(BaseTest):
password = "testPassword123"
invalidPassword = "incorrectincorrect"
def runTests(self):
print("Running authentication tests")
self.testRegister(self.username, self.email)
token = self.testLogin(self.username)
self.testRegister(self.target, self.targetEmail)
self.testLogout(token)
time.sleep(CacheTimes.USER_USERNAME)
token = self.testLogin(self.username)
targetToken = self.testLogin(self.target)
time.sleep(CacheTimes.USER_USERNAME)
return targetToken, token
def testRegister(self, username, email):
invalidBody = {"username":username, "email":email}
body = {"username":username, "email":email, "password":self.password, "confirmNewPassword":self.password, "ageMinimum":True, "recaptchaResponse":"test"}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, invalidBody, self.expectedInvalid)
invalidBody = {"email":email, "password":self.password}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, invalidBody, self.expectedInvalid)
invalidBody = {"username":username, "password":self.password}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, invalidBody, self.expectedInvalid)
invalidBody = {"username":username, "email":email, "password":self.password}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, invalidBody, self.expectedInvalid)
invalidBody = {"username":username, "email":email, "password":self.password, "confirmNewPassword":self.password + "s", "recaptchaResponse":"test"}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, invalidBody, self.expectedInvalid)
invalidBody = {"username":username, "email":email, "password":self.password, "confirmNewPassword":self.password, "ageMinimum":False, "recaptchaResponse":"test"}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, invalidBody, self.expectedInvalid)
restrictedBody = {"username":username, "password":"password1234567", "email":email, "confirmNewPassword":"password1234567", "ageMinimum":True, "recaptchaResponse":"test"}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, restrictedBody, self.expectedRestrictedPassword)
restrictedBody = {"username":"penstro", "password":self.password, "email":email, "confirmNewPassword":self.password, "ageMinimum":True, "recaptchaResponse":"test"}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, restrictedBody, self.expectedRestrictedUsername)
restrictedBody = {"username":username, "password":self.password, "email":"[email protected]", "confirmNewPassword":self.password, "ageMinimum":True, "recaptchaResponse":"test"}
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, restrictedBody, self.expectedRestrictedEmail)
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, body, self.expectedResultCreated)
PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, body, self.expectedExistsUsernameEmail)
def testLogin(self, username):
body = {"username":username, "password":self.invalidPassword}
PyRequest().expectResponse(Paths.LOGIN, PyRequest.POST, None, self.expectedInvalid)
PyRequest().expectResponse(Paths.LOGIN, PyRequest.POST, body, self.expectedDenied)
body = {"username":username, "password":self.password}
data = PyRequest().expectResponse(Paths.LOGIN, PyRequest.POST, body, self.expectedResultSuccess)
if 'dto' in data:
if 'result' in data['dto']:
print("TOKEN: " + str(data['dto']['result']))
return str(data['dto']['result'])
return None
def testLogout(self, token):
PyRequest().expectResponse(Paths.LOGOUT, PyRequest.POST, None, self.expectedDenied)
PyRequest(token).expectResponse(Paths.LOGOUT, PyRequest.POST, None, self.expectedSuccess)
| 57.432432
| 183
| 0.704941
| 386
| 4,250
| 7.756477
| 0.178756
| 0.101536
| 0.140281
| 0.132265
| 0.665665
| 0.56179
| 0.546426
| 0.515698
| 0.44155
| 0.44155
| 0
| 0.004812
| 0.168706
| 4,250
| 73
| 184
| 58.219178
| 0.842627
| 0
| 0
| 0.178571
| 0
| 0
| 0.154588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.25
| 0.107143
| 0
| 0.285714
| 0.035714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29f31b2343f07216325a81bd944dfce29b98de66
| 610
|
py
|
Python
|
2_sheet/2-sheet-hundt-robin/plot-data.py
|
robinhundt/practical-course-parallel-computing
|
08f1fc76324d5c6338b32b2f14c2a11fef3ad619
|
[
"MIT"
] | null | null | null |
2_sheet/2-sheet-hundt-robin/plot-data.py
|
robinhundt/practical-course-parallel-computing
|
08f1fc76324d5c6338b32b2f14c2a11fef3ad619
|
[
"MIT"
] | null | null | null |
2_sheet/2-sheet-hundt-robin/plot-data.py
|
robinhundt/practical-course-parallel-computing
|
08f1fc76324d5c6338b32b2f14c2a11fef3ad619
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
# number of threads used to compute product of 2 matrices of dim. 1024
data_x = [1, 2, 3, 4, 8, 16, 32, 64, 128,
256, 512, 1024, 2048, 4096]
# execution time in seconds
data_y = [3.300059, 1.664494, 2.294884, 3.200235,
2.915945, 3.082389, 3.023162, 3.012096,
2.958028, 2.939918, 2.847527, 2.898556,
2.876036, 2.963720]
plt.figure()
plt.plot(data_x, data_y)
plt.xlabel('# of threads')
plt.xscale('log')
plt.ylabel('execution time in seconds')
plt.title('Execution times of 1024x1024 matrix multiplication with different thread counts')
plt.show()
| 33.888889
| 82
| 0.672131
| 103
| 610
| 3.941748
| 0.640777
| 0.044335
| 0.073892
| 0.108374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291242
| 0.195082
| 610
| 18
| 83
| 33.888889
| 0.535642
| 0.154098
| 0
| 0
| 0
| 0
| 0.212062
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29f348ce2221e92c79d25e0d2151332aec4f637c
| 1,100
|
py
|
Python
|
memoro/wsgi.py
|
bbengfort/memorandi
|
4591d26c097513d67e11916583ed043e78e87816
|
[
"MIT"
] | null | null | null |
memoro/wsgi.py
|
bbengfort/memorandi
|
4591d26c097513d67e11916583ed043e78e87816
|
[
"MIT"
] | 18
|
2020-12-02T16:37:21.000Z
|
2021-09-22T19:40:37.000Z
|
memoro/wsgi.py
|
bbengfort/memorandi
|
4591d26c097513d67e11916583ed043e78e87816
|
[
"MIT"
] | null | null | null |
# memoro.wsgi
# WSGI config for memoro project.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Sat Nov 28 13:44:01 2020 -0500
#
# Copyright (C) 2020 Bengfort.com
# For license information, see LICENSE
#
# ID: wsgi.py [] [email protected] $
"""
WSGI config for memoro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
##########################################################################
## Imports
##########################################################################
import os
from django.core.wsgi import get_wsgi_application
from dotenv import find_dotenv, load_dotenv
##########################################################################
## Load environment and create WSGI application
##########################################################################
load_dotenv(find_dotenv())
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'memoro.settings.development')
application = get_wsgi_application()
| 28.205128
| 78
| 0.555455
| 113
| 1,100
| 5.318584
| 0.575221
| 0.079867
| 0.043261
| 0.063228
| 0.086522
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.1
| 1,100
| 38
| 79
| 28.947368
| 0.584848
| 0.464545
| 0
| 0
| 0
| 0
| 0.180812
| 0.180812
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
29f94d2b334b89e0c508fee4d9e22209246bc128
| 5,970
|
py
|
Python
|
api/user.py
|
gfoo/fastapi-demo
|
44ceb9e94fa833841756136c3b446f192a311dde
|
[
"Unlicense"
] | null | null | null |
api/user.py
|
gfoo/fastapi-demo
|
44ceb9e94fa833841756136c3b446f192a311dde
|
[
"Unlicense"
] | null | null | null |
api/user.py
|
gfoo/fastapi-demo
|
44ceb9e94fa833841756136c3b446f192a311dde
|
[
"Unlicense"
] | null | null | null |
from time import time
from typing import List
from core.security import verify_password
from db import users as DBUsers
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.responses import JSONResponse
from models.user import DBUser
from schemas.user import (UserCreate, UserUpdateActivate, UserUpdatePassword,
UserUpdateSuperuser, UserView)
from sqlalchemy.orm import Session
from .deps import get_current_active_superuser, get_current_active_user, get_db
router = APIRouter(
prefix='/users',
tags=['users']
)
@router.get('/', response_model=List[UserView])
def get_all_users(skip: int = 0, limit: int = 100,
db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Retrieve users.
"""
return DBUsers.get_users(db, skip=skip, limit=limit)
@router.get("/me", response_model=UserView)
def get_user(db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_user)):
"""
Retrieve my user.
"""
return current_user
@router.get("/{user_id}", response_model=UserView)
def get_user(user_id: int, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_user)):
"""
Retrieve a user (only itself if not enough privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user == current_user:
return db_user
if not current_user.is_superuser:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="The user does not have enough privileges"
)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
return db_user
@router.post("/{user_id}/reset_password", response_model=UserView)
def update_user_password_reset(
user_id: int, user_passwords: UserUpdatePassword, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Update any user password (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.update_user_password(
db=db, user_id=user_id, new_password=user_passwords.new_password)
return db_user
@router.post("/{user_id}/activate", response_model=UserView)
def update_user_activate(
user_id: int, user_activate: UserUpdateActivate, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Update any user activation (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.update_user_activate(
db=db, user_id=user_id, activate=user_activate.activate)
return db_user
@router.post("/{user_id}/superuser", response_model=UserView)
def update_user_activate(
user_id: int, user_superuser: UserUpdateSuperuser, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Update any user privileges (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.update_user_superuser(
db=db, user_id=user_id, superuser=user_superuser.superuser)
return db_user
@router.post("/{user_id}/password", response_model=UserView)
def update_user_password(
user_id: int, user_passwords: UserUpdatePassword, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_user)):
"""
Update personal user password (require previous password).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
if db_user != current_user:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Can only update its own password"
)
if user_passwords.old_password == user_passwords.new_password:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="New password cannot be the same as the old one")
if not verify_password(user_passwords.old_password, db_user.password):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Incorrect old password")
DBUsers.update_user_password(
db=db, user_id=user_id, new_password=user_passwords.new_password)
return db_user
@router.post("/", response_model=UserView)
def create_user(user: UserCreate, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Create a user.
"""
db_user = DBUsers.get_user_by_email(db, email=user.email)
if db_user:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
return DBUsers.create_user(db=db, user=user)
@router.delete("/{user_id}", response_class=JSONResponse)
def delete_user(user_id: int, db: Session = Depends(get_db),
current_user: DBUser = Depends(get_current_active_superuser)):
"""
Delete a user (require superuser privileges).
"""
db_user = DBUsers.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
DBUsers.delete_user(db=db, user_id=user_id)
return JSONResponse(content={"status": "ok", "user_id": user_id})
| 36.402439
| 109
| 0.701675
| 786
| 5,970
| 5.043257
| 0.132316
| 0.05449
| 0.030272
| 0.036327
| 0.658426
| 0.640767
| 0.614026
| 0.591322
| 0.566095
| 0.566095
| 0
| 0.007781
| 0.203518
| 5,970
| 163
| 110
| 36.625767
| 0.825868
| 0.064322
| 0
| 0.486486
| 0
| 0
| 0.070092
| 0.004587
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081081
| false
| 0.153153
| 0.09009
| 0
| 0.261261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29faa4ea69ec98280ad24b2003914856eee015a8
| 12,800
|
py
|
Python
|
governor/postgresql.py
|
billcap/governor
|
0056ec15d973d24f36688783b415fe894ca94db7
|
[
"MIT"
] | null | null | null |
governor/postgresql.py
|
billcap/governor
|
0056ec15d973d24f36688783b415fe894ca94db7
|
[
"MIT"
] | null | null | null |
governor/postgresql.py
|
billcap/governor
|
0056ec15d973d24f36688783b415fe894ca94db7
|
[
"MIT"
] | null | null | null |
import logging
import os
import psycopg2
import time
import shlex
import subprocess
import shutil
import threading
from urllib.parse import urlparse
logger = logging.getLogger(__name__)
class Postgresql:
CONN_OPTIONS = {
'connect_timeout': 3,
'options': '-c statement_timeout=2000',
}
_conn = None
_cursor_holder = None
def __init__(self, config, psql_config):
self.config = config
self.psql_config = psql_config
self.name = config.name
self.listen_addresses, self.port = config.listen_address.split(':')
self.data_dir = config.data_dir
self.recovery_conf = os.path.join(self.data_dir, 'recovery.conf')
self.pid_path = os.path.join(self.data_dir, 'postmaster.pid')
self._pg_ctl = ('pg_ctl', '-w', '-D', self.data_dir)
self.members = set() # list of already existing replication slots
self.promoted = False
def parseurl(self, url):
r = urlparse('postgres://' + url)
options = {
'host': r.hostname,
'port': r.port or 5432,
'user': self.config.repl_user,
'password': self.config.repl_password,
'database': self.config.dbname,
'fallback_application_name': 'Governor',
}
options.update(self.CONN_OPTIONS)
return options
def pg_ctl(self, *args, **kwargs):
cmd = self._pg_ctl + args
logger.info(cmd)
return subprocess.call(cmd, **kwargs)
def connection(self):
if not self._conn or self._conn.closed:
self._conn = psycopg2.connect(
dbname=self.config.dbname,
port=self.port,
user=self.config.user,
password=self.config.password,
**self.CONN_OPTIONS
)
self._conn.autocommit = True
return self._conn
def _cursor(self):
if not self._cursor_holder or self._cursor_holder.closed:
self._cursor_holder = self.connection().cursor()
return self._cursor_holder
def disconnect(self):
if self._conn:
self._conn.close()
self._conn = self._cursor_holder = None
def query(self, sql, *params):
max_attempts = 3
for i in range(max_attempts):
ex = None
try:
cursor = self._cursor()
cursor.execute(sql, params)
return cursor
except psycopg2.InterfaceError as e:
ex = e
except psycopg2.OperationalError as e:
if self._conn and self._conn.closed == 0:
raise e
ex = e
self.disconnect()
time.sleep(5)
if ex:
raise ex
def data_directory_empty(self):
return not (os.path.exists(self.data_dir) and os.listdir(self.data_dir))
def initialize(self):
if subprocess.call(['initdb', '-D', self.data_dir, '--encoding', 'UTF-8']) == 0:
self.write_pg_hba()
return True
return False
def sync_from_leader(self, leader):
r = self.parseurl(leader.value)
env = os.environ.copy()
if r['password'] is not None:
pgpass = os.path.join(os.environ['ROOT'], 'pgpass')
with open(pgpass, 'w') as f:
os.fchmod(f.fileno(), 0o600)
f.write('{host}:{port}:*:{user}:{password}\n'.format(**r))
env['PGPASSFILE'] = pgpass
try:
subprocess.check_call([
'pg_basebackup', '-R', '-P', '-w',
'-D', self.data_dir,
'--host', r['host'],
'--port', str(r['port']),
'-U', self.config.repl_user,
], env=env)
except subprocess.CalledProcessError:
return False
finally:
os.chmod(self.data_dir, 0o700)
return True
def is_leader(self):
is_leader = not self.query('SELECT pg_is_in_recovery()').fetchone()[0]
if is_leader:
self.promoted = False
return is_leader
def is_running(self):
return self.pg_ctl('status', stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) == 0
def start_threaded(self):
logger = logging.getLogger('postgres')
cmd = [
'postgres', '-i',
'-p', self.port,
'-h', self.listen_addresses,
'-D', self.data_dir,
] + self.psql_config
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
while True:
line = proc.stdout.readline()
if not line:
break
logging.info(line)
def start(self):
if self.is_running():
self.load_replication_slots()
logger.error('Cannot start PostgreSQL because one is already running.')
return False
if os.path.exists(self.pid_path):
os.remove(self.pid_path)
logger.info('Removed %s', self.pid_path)
self.disconnect()
thread = threading.Thread(target=self.start_threaded)
thread.daemon = True
thread.start()
return True
def stop(self):
self.disconnect()
return self.pg_ctl('stop', '-m', 'fast') != 0
def reload(self):
return self.pg_ctl('reload') == 0
def restart(self):
self.disconnect()
return self.pg_ctl('restart', '-m', 'fast') == 0
def is_healthy(self):
if not self.is_running():
logger.warning('Postgresql is not running.')
return False
return True
def is_healthiest_node(self, cluster):
if self.is_leader():
return True
if int(cluster.optime.value) - self.xlog_position() > self.config.maximum_lag:
return False
for name, m in cluster.members.items():
if name == self.name:
continue
try:
member_conn = psycopg2.connect(**self.parseurl(m.value))
member_conn.autocommit = True
member_cursor = member_conn.cursor()
member_cursor.execute(
"SELECT pg_is_in_recovery(), %s - (pg_last_xlog_replay_location() - '0/0000000'::pg_lsn)",
(self.xlog_position(), ))
row = member_cursor.fetchone()
member_cursor.close()
member_conn.close()
logger.error([self.name, name, row])
if not row[0] or row[1] < 0:
return False
except psycopg2.Error:
continue
return True
def write_pg_hba(self):
if self.config.password:
method = 'md5'
else:
logger.warning('No password specified')
method = 'trust'
hba = ['local all all trust']
for subnet in self.config.allow_address.split():
hba.append(' '.join(['host', self.config.dbname, self.config.user, subnet, method]))
if self.config.repl_password:
method = 'md5'
else:
logger.warning('No replication password specified')
method = 'trust'
for subnet in self.config.repl_allow_address.split():
hba.append(' '.join(['host', 'replication', self.config.repl_user, subnet, method]))
config = ConfigFile(os.path.join(self.data_dir, 'pg_hba.conf'))
config.write_config(*hba)
def primary_conninfo(self, leader_url):
r = self.parseurl(leader_url)
values = ['{}={}'.format(k, r[k]) for k in ['user', 'host', 'port']]
if r['password'] is not None:
values.append('password={}'.format(r['password']))
return '{} sslmode=prefer sslcompression=1'.format(' '.join(values))
def check_recovery_conf(self, leader):
if not os.path.isfile(self.recovery_conf):
return False
pattern = (leader and self.primary_conninfo(leader.value))
for key, value in RecoveryConf(self.recovery_conf).load_config():
if key == 'primary_conninfo':
if not pattern:
return False
return value[1:-1] == pattern
return not pattern
def write_recovery_conf(self, leader):
contents = [
('standby_mode', 'on'),
('recovery_target_timeline', 'latest'),
]
if leader:
contents.append(('primary_slot_name', self.name))
contents.append(('primary_conninfo', self.primary_conninfo(leader.value)))
config = RecoveryConf(self.recovery_conf)
config.write_config(*contents, truncate = not leader)
def follow_the_leader(self, leader):
if not self.check_recovery_conf(leader):
self.write_recovery_conf(leader)
self.restart()
def promote(self):
self.promoted = (self.pg_ctl('promote') == 0)
return self.promoted
def create_users(self):
op = ('ALTER' if self.config.user == 'postgres' else 'CREATE')
query = '{} USER "{}" WITH {}'.format
# normal client user
self.create_user(query(op, self.config.user, 'SUPERUSER'), self.config.password)
# replication user
self.create_user(query('CREATE', self.config.repl_user, 'REPLICATION'), self.config.repl_password)
def create_user(self, query, password):
if password:
return self.query(query + ' ENCRYPTED PASSWORD %s', password)
return self.query(query)
def xlog_position(self):
return self.query("""SELECT CASE WHEN pg_is_in_recovery()
THEN pg_last_xlog_replay_location() - '0/0000000'::pg_lsn
ELSE pg_current_xlog_location() - '0/00000'::pg_lsn END""").fetchone()[0]
def load_replication_slots(self):
cursor = self.query("SELECT slot_name FROM pg_replication_slots WHERE slot_type='physical'")
self.members = set(r[0] for r in cursor)
def sync_replication_slots(self, members):
members = set(name for name in members if name != self.name)
# drop unused slots
for slot in self.members - members:
self.query("""SELECT pg_drop_replication_slot(%s)
WHERE EXISTS(SELECT 1 FROM pg_replication_slots
WHERE slot_name = %s)""", slot, slot)
# create new slots
for slot in members - self.members:
self.query("""SELECT pg_create_physical_replication_slot(%s)
WHERE NOT EXISTS (SELECT 1 FROM pg_replication_slots
WHERE slot_name = %s)""", slot, slot)
self.members = members
def create_replication_slots(self, cluster):
self.sync_replication_slots([name for name in cluster.members if name != self.name])
def drop_replication_slots(self):
self.sync_replication_slots([])
def last_operation(self):
return self.xlog_position()
class ConfigFile:
__slots__ = ('path',)
def __init__(self, path):
self.path = path
backup = self.path + '.backup'
if not os.path.exists(backup):
if os.path.exists(self.path):
os.rename(self.path, backup)
else:
with open(backup, 'w'): pass
def reload_backup(self):
shutil.copy(self.path + '.backup', self.path)
def load_config(self):
with open(self.path) as file:
for line in file:
if not line.startswith('#'):
yield line
def write_config(self, *lines, reload=True, check_duplicates=True, truncate=False):
if reload:
self.reload_backup()
if check_duplicates:
config = set(self.load_config())
else:
config = ()
mode = ('w' if truncate else 'a')
with open(self.path, mode) as file:
for l in lines:
if l not in config:
file.write('\n' + l)
file.write('\n')
class RecoveryConf(ConfigFile):
def load_config(self):
for line in super().load_config():
k, _, v = line.strip().partition(' = ')
yield (k, v)
def write_config(self, *args, reload=True, check_duplicates=True, **kwargs):
if reload:
self.reload_backup()
if check_duplicates:
config = set(i[0] for i in self.load_config())
else:
config = ()
args = ("{} = '{}'".format(k, v) for k, v in args if k not in config)
return super().write_config(*args, reload=False, check_duplicates=False, **kwargs)
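# --- Illustrative usage of ConfigFile, not part of the original module ---
# write_config() backs up the target file on first use, then appends any lines
# that are not already present (or truncates first when truncate=True). The
# path below is a placeholder:
# hba = ConfigFile('/tmp/pg_hba.conf')
# hba.write_config('local all all trust', 'host all all 127.0.0.1/32 md5')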
| 33.952255
| 114
| 0.563984
| 1,485
| 12,800
| 4.701684
| 0.186532
| 0.03151
| 0.01733
| 0.006875
| 0.171584
| 0.097393
| 0.070753
| 0.041249
| 0.041249
| 0.03065
| 0
| 0.007933
| 0.320469
| 12,800
| 376
| 115
| 34.042553
| 0.79478
| 0.008828
| 0
| 0.168285
| 0
| 0
| 0.126962
| 0.020503
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126214
| false
| 0.061489
| 0.029126
| 0.016181
| 0.288026
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29fda9d9b2256b8b4efc118aa8ea61e7cbc1a09c
| 264
|
py
|
Python
|
thirdparty/flask/template/macro_demo.py
|
gwaysoft/python
|
a74a0b553dfca9606083a41ab6d03801e67d2467
|
[
"Apache-2.0"
] | null | null | null |
thirdparty/flask/template/macro_demo.py
|
gwaysoft/python
|
a74a0b553dfca9606083a41ab6d03801e67d2467
|
[
"Apache-2.0"
] | null | null | null |
thirdparty/flask/template/macro_demo.py
|
gwaysoft/python
|
a74a0b553dfca9606083a41ab6d03801e67d2467
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
return render_template("macro.html", type="text", value="from endpoint")
if __name__ == '__main__':
print(app.url_map)
app.run(debug=True, host="0.0.0.0")
| 18.857143
| 76
| 0.674242
| 39
| 264
| 4.179487
| 0.692308
| 0.03681
| 0.03681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 0.151515
| 264
| 13
| 77
| 20.307692
| 0.709821
| 0
| 0
| 0
| 0
| 0
| 0.162879
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0.125
| 0.375
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
4b065798f8f3175be2995f3dc86fae9e7dc987b7
| 1,249
|
py
|
Python
|
tests/ozpcenter_model_access/test_contact_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1
|
2018-10-05T17:03:01.000Z
|
2018-10-05T17:03:01.000Z
|
tests/ozpcenter_model_access/test_contact_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1
|
2017-01-06T19:20:32.000Z
|
2017-01-06T19:20:32.000Z
|
tests/ozpcenter_model_access/test_contact_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 7
|
2016-12-16T15:42:05.000Z
|
2020-09-05T01:11:27.000Z
|
import pytest
from django.test import TestCase
from django.test import override_settings
import ozpcenter.api.contact_type.model_access as model_access
from ozpcenter.models import ContactType
from tests.cases.factories import ContactTypeFactory
@pytest.mark.model_access
@override_settings(ES_ENABLED=False)
class ContactTypeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.contact_types = ContactTypeFactory.create_batch(5)
def setUp(self):
pass
def test__get_all_contact_types(self):
results = list(model_access.get_all_contact_types().order_by("id"))
self.assertListEqual(results, self.contact_types)
def test__get_contact_type_by_name(self):
expected = self.contact_types[0]
result = model_access.get_contact_type_by_name(expected.name)
self.assertEqual(result, expected)
def test__get_contact_type_by_name__not_found(self):
contact_type = model_access.get_contact_type_by_name('Not Existent', False)
self.assertIsNone(contact_type)
def test__get_contact_type_by_name__not_found_raises_error(self):
with self.assertRaises(ContactType.DoesNotExist):
model_access.get_contact_type_by_name('Not Existent')
| 30.463415
| 83
| 0.767814
| 164
| 1,249
| 5.457317
| 0.365854
| 0.110615
| 0.093855
| 0.107263
| 0.236872
| 0.236872
| 0.236872
| 0.172067
| 0.172067
| 0
| 0
| 0.001905
| 0.159327
| 1,249
| 40
| 84
| 31.225
| 0.850476
| 0
| 0
| 0
| 0
| 0
| 0.020817
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.222222
| false
| 0.037037
| 0.222222
| 0
| 0.481481
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b13fbf54481cade8e8734d48b08412beb1ed9cd
| 4,009
|
py
|
Python
|
tests/io/export/voc/test_create_annotation.py
|
wbknez/breakdb
|
f783820425c8cb70d8caedc6f5839a72de7c945e
|
[
"Apache-2.0"
] | 1
|
2020-02-03T18:31:20.000Z
|
2020-02-03T18:31:20.000Z
|
tests/io/export/voc/test_create_annotation.py
|
wbknez/breakdb
|
f783820425c8cb70d8caedc6f5839a72de7c945e
|
[
"Apache-2.0"
] | null | null | null |
tests/io/export/voc/test_create_annotation.py
|
wbknez/breakdb
|
f783820425c8cb70d8caedc6f5839a72de7c945e
|
[
"Apache-2.0"
] | null | null | null |
"""
Contains unit tests to ensure single database items are created correctly in a
Pascal VOC compatible format.
"""
import os
from xml.etree.ElementTree import Element, SubElement
import numpy as np
from breakdb.io.export.voc import create_annotation
from tests.helpers.dataset import create_random_string
from tests.helpers.xml import match
class TestCreateAnnotation:
"""
Test suite for :function: 'create_annotation'.
"""
def test_create_annotation_does_not_create_annotation_if_empty(self):
width = np.random.randint(100, 1920)
height = np.random.randint(100, 1200)
depth = np.random.choice([1, 3], 1)[0]
x = np.random.randint(0, width, 5)
y = np.random.randint(0, height, 5)
random_paths = [create_random_string(10) for _ in range(5)]
file_path = os.path.join(*random_paths) + ".png"
xml = create_annotation(file_path, width, height, depth, [])
expected = Element("annotation")
folder = SubElement(expected, 'folder')
filename = SubElement(expected, 'filename')
path = SubElement(expected, 'path')
source = SubElement(expected, 'source')
size = SubElement(expected, 'size')
segmented = SubElement(expected, 'segmented')
database = SubElement(source, 'database')
width_tag = SubElement(size, 'width')
height_tag = SubElement(size, 'height')
depth_tag = SubElement(size, 'depth')
folder.text = os.path.basename(os.path.dirname(file_path))
filename.text = os.path.basename(file_path)
path.text = file_path
segmented.text = "0"
database.text = "Unknown"
width_tag.text = str(width)
height_tag.text = str(height)
depth_tag.text = str(depth)
match(xml, expected)
def test_create_annotation_creates_well_formed_xml(self):
width = np.random.randint(100, 1920)
height = np.random.randint(100, 1200)
depth = np.random.choice([1, 3], 1)[0]
x = np.random.randint(0, width, 5)
y = np.random.randint(0, height, 5)
coords = [coord for coords in zip(x, y) for coord in coords]
random_paths = [create_random_string(10) for _ in range(5)]
file_path = os.path.join(*random_paths) + ".png"
xml = create_annotation(file_path, width, height, depth, [coords])
expected = Element("annotation")
folder = SubElement(expected, 'folder')
filename = SubElement(expected, 'filename')
path = SubElement(expected, 'path')
source = SubElement(expected, 'source')
size = SubElement(expected, 'size')
segmented = SubElement(expected, 'segmented')
obj = SubElement(expected, 'object')
database = SubElement(source, 'database')
width_tag = SubElement(size, 'width')
height_tag = SubElement(size, 'height')
depth_tag = SubElement(size, 'depth')
name = SubElement(obj, "name")
pose = SubElement(obj, "pose")
truncated = SubElement(obj, "truncated")
difficult = SubElement(obj, "difficult")
bndbox = SubElement(obj, "bndbox")
x_min = SubElement(bndbox, "xmin")
y_min = SubElement(bndbox, "ymin")
x_max = SubElement(bndbox, "xmax")
y_max = SubElement(bndbox, "ymax")
folder.text = os.path.basename(os.path.dirname(file_path))
filename.text = os.path.basename(file_path)
path.text = file_path
segmented.text = "0"
database.text = "Unknown"
width_tag.text = str(width)
height_tag.text = str(height)
depth_tag.text = str(depth)
name.text = f"{os.path.basename(os.path.splitext(file_path)[0])}-1"
pose.text = "Unspecified"
truncated.text = "0"
difficult.text = "0"
x_min.text = str(np.min(x))
y_min.text = str(np.min(y))
x_max.text = str(np.max(x))
y_max.text = str(np.max(y))
match(xml, expected)
| 32.860656 | 78 | 0.626091 | 489 | 4,009 | 5.00818 | 0.214724 | 0.095549 | 0.049 | 0.0294 | 0.651695 | 0.619028 | 0.619028 | 0.619028 | 0.619028 | 0.619028 | 0 | 0.018629 | 0.250187 | 4,009 | 121 | 79 | 33.132231 | 0.796075 | 0.038663 | 0 | 0.642857 | 0 | 0 | 0.074393 | 0.013573 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02381 | false | 0 | 0.071429 | 0 | 0.107143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |

4b1fa47c925f46978fe64a19c7b80b111b447a75 | 2,798 | py | Python | gopredict/modelo.py | ajalba/gopredict | bfcb1c4c10b6787da10c7515ae2adf65252bb8c6 | ["MIT"] | null | null | null | gopredict/modelo.py | ajalba/gopredict | bfcb1c4c10b6787da10c7515ae2adf65252bb8c6 | ["MIT"] | 39 | 2021-10-31T16:51:39.000Z | 2021-11-22T09:56:04.000Z | gopredict/modelo.py | ajalba/gopredict | bfcb1c4c10b6787da10c7515ae2adf65252bb8c6 | ["MIT"] | null | null | null |
"""
Clase para representar a los diferentes modelos y su comportamiento
atributos(de momento)
df=dataframe de entrenamiento proviniente del conjunto de datos de entrenamiento del usuario
x_train,x_test,y_train,y_test, particiones de df para entrenar el modelo
El resto de métodos son autoexplicativos
"""
from numpy import array
from pandas.core.frame import DataFrame
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn import metrics
class Modelo:
#Inicializa un modelo tomando sus datos
def __init__(self,data):
self.df = data
self.X_train = None
self.X_test = None
self.y_train = None
self.y_test = None
self.y_pred = None
self.modelo=LogisticRegression()
# Devuelve una particion del dataframe
def realizar_particion(self,cols_atributos:array):
aux = self.df.copy(deep=True)
return aux[cols_atributos]
#Realiza una particion en train y test
def particion_train_test(self,X:DataFrame, y:DataFrame, test_porcentaje:int):
try:
self.X_train,self.X_test,self.y_train,self.y_test=train_test_split(
X,y,test_size=test_porcentaje,random_state=0)
return True
except:
return False
#Entrena el modelo con los datos de entrenamiento
def entrenar(self):
try:
self.modelo.fit(self.X_train, self.y_train)
return True
except Exception as e:
print(e)
return False
#Realiza una prediccion sobre el conjunto de entrenamiento
def predecir_entrenamiento(self):
try:
self.y_pred = self.modelo.predict(self.X_test)
return True
except:
return False
#devuelve las métricas de rendimiento del modelo en entrenamiento
def get_metricas_rendimiento(self):
accuracy = metrics.accuracy_score(self.y_test, self.y_pred)
precision = metrics.precision_score(self.y_test, self.y_pred, zero_division=0)
recall = metrics.recall_score(self.y_test, self.y_pred)
f1 = metrics.f1_score(self.y_test, self.y_pred)
return [accuracy,precision,recall,f1]
#Devuelve las métricas para la matriz de confusion
def get_metricas_matriz_confusion(self):
return metrics.confusion_matrix(self.y_test,self.y_pred)
def get_metricas_roc(self):
y_pred_proba = self.modelo.predict_proba(self.X_test)[::,1]
fpr, tpr, _ = metrics.roc_curve(self.y_test, y_pred_proba)
fpr, tpr, _ = metrics.roc_curve(self.y_test, y_pred_proba)
roc_data = pd.DataFrame([])
roc_data['True Positive'] = tpr
roc_data['False Positive'] = fpr
return roc_data
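A rough usage sketch for the Modelo class above; the DataFrame contents, column names and the 0.2 test fraction are invented for illustration.

import numpy as np
import pandas as pd

# Illustrative data: two numeric features and a binary target column.
rng = np.random.default_rng(0)
df = pd.DataFrame({
    'feature_a': rng.normal(size=200),
    'feature_b': rng.normal(size=200),
    'target': rng.integers(0, 2, size=200),
})

modelo = Modelo(df)
X = modelo.realizar_particion(['feature_a', 'feature_b'])
y = modelo.realizar_particion(['target'])['target']

# Split, train, predict, then inspect the metrics the class exposes.
if modelo.particion_train_test(X, y, 0.2) and modelo.entrenar() and modelo.predecir_entrenamiento():
    accuracy, precision, recall, f1 = modelo.get_metricas_rendimiento()
    print(accuracy, precision, recall, f1)
    print(modelo.get_metricas_matriz_confusion())
    print(modelo.get_metricas_roc().head())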
| 36.337662 | 92 | 0.686919 | 387 | 2,798 | 4.764858 | 0.29199 | 0.05423 | 0.043926 | 0.035249 | 0.132321 | 0.103037 | 0.093275 | 0.043384 | 0.043384 | 0.043384 | 0 | 0.002824 | 0.240529 | 2,798 | 76 | 93 | 36.815789 | 0.864941 | 0.225161 | 0 | 0.240741 | 0 | 0 | 0.012535 | 0 | 0 | 0 | 0 | 0.013158 | 0 | 1 | 0.148148 | false | 0 | 0.111111 | 0.018519 | 0.462963 | 0.018519 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |

4b26f2f9d05f6e347a28ccd82f8bc4ee81785946 | 808 | py | Python | essEcommerce/views.py | AymanTareq/cit_ecommerce | 7a000f9f9ed76af99ec3c5a5faa1dbde8b988370 | ["CC0-1.0"] | null | null | null | essEcommerce/views.py | AymanTareq/cit_ecommerce | 7a000f9f9ed76af99ec3c5a5faa1dbde8b988370 | ["CC0-1.0"] | null | null | null | essEcommerce/views.py | AymanTareq/cit_ecommerce | 7a000f9f9ed76af99ec3c5a5faa1dbde8b988370 | ["CC0-1.0"] | null | null | null |
from django.shortcuts import render
from .models import *


def all_product(request):
    products = Product.objects.all()
    context = {
        'products': products,
    }
    return render(request, 'essEcommerce/all_product.html', context)


def cart(request):
    if request.user.is_authenticated:
        customer = request.user.customer
        order, create = Order.objects.get_or_create(customer=customer, status=False)
        items = order.orderitem_set.all()
    else:
        items = []
        order = {
            'get_cart_total': 0,
            'get_cart_total_price': 0
        }
    context = {
        'order': order,
        'items': items,
    }
    return render(request, 'essEcommerce/cart.html', context)


def check_out(request):
    return render(request, 'essEcommerce/checkout.html')
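For orientation, a minimal URLconf sketch showing how these views could be wired up; the urls.py location and the route names are assumptions, not taken from the project.

# essEcommerce/urls.py (illustrative; route names are assumptions)
from django.urls import path

from . import views

urlpatterns = [
    path('', views.all_product, name='all_product'),
    path('cart/', views.cart, name='cart'),
    path('checkout/', views.check_out, name='check_out'),
]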
| 26.064516 | 85 | 0.634901 | 89 | 808 | 5.629213 | 0.426966 | 0.071856 | 0.113772 | 0.185629 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003306 | 0.251238 | 808 | 30 | 86 | 26.933333 | 0.824793 | 0 | 0 | 0.076923 | 0 | 0 | 0.159653 | 0.095297 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.076923 | 0.038462 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |