Each record below follows this column schema; the name : dtype pairs are listed in the order the values appear in a row:

hexsha : string | size : int64 | ext : string | lang : string
max_stars_repo_path : string | max_stars_repo_name : string | max_stars_repo_head_hexsha : string | max_stars_repo_licenses : list | max_stars_count : int64 | max_stars_repo_stars_event_min_datetime : string | max_stars_repo_stars_event_max_datetime : string
max_issues_repo_path : string | max_issues_repo_name : string | max_issues_repo_head_hexsha : string | max_issues_repo_licenses : list | max_issues_count : int64 | max_issues_repo_issues_event_min_datetime : string | max_issues_repo_issues_event_max_datetime : string
max_forks_repo_path : string | max_forks_repo_name : string | max_forks_repo_head_hexsha : string | max_forks_repo_licenses : list | max_forks_count : int64 | max_forks_repo_forks_event_min_datetime : string | max_forks_repo_forks_event_max_datetime : string
content : string | avg_line_length : float64 | max_line_length : int64 | alphanum_fraction : float64
qsc_code_num_words_quality_signal : int64 | qsc_code_num_chars_quality_signal : float64 | qsc_code_mean_word_length_quality_signal : float64 | qsc_code_frac_words_unique_quality_signal : float64 | qsc_code_frac_chars_top_2grams_quality_signal : float64 | qsc_code_frac_chars_top_3grams_quality_signal : float64 | qsc_code_frac_chars_top_4grams_quality_signal : float64 | qsc_code_frac_chars_dupe_5grams_quality_signal : float64 | qsc_code_frac_chars_dupe_6grams_quality_signal : float64 | qsc_code_frac_chars_dupe_7grams_quality_signal : float64 | qsc_code_frac_chars_dupe_8grams_quality_signal : float64 | qsc_code_frac_chars_dupe_9grams_quality_signal : float64 | qsc_code_frac_chars_dupe_10grams_quality_signal : float64 | qsc_code_frac_chars_replacement_symbols_quality_signal : float64 | qsc_code_frac_chars_digital_quality_signal : float64 | qsc_code_frac_chars_whitespace_quality_signal : float64 | qsc_code_size_file_byte_quality_signal : float64 | qsc_code_num_lines_quality_signal : float64 | qsc_code_num_chars_line_max_quality_signal : float64 | qsc_code_num_chars_line_mean_quality_signal : float64 | qsc_code_frac_chars_alphabet_quality_signal : float64 | qsc_code_frac_chars_comments_quality_signal : float64 | qsc_code_cate_xml_start_quality_signal : float64 | qsc_code_frac_lines_dupe_lines_quality_signal : float64 | qsc_code_cate_autogen_quality_signal : float64 | qsc_code_frac_lines_long_string_quality_signal : float64 | qsc_code_frac_chars_string_length_quality_signal : float64 | qsc_code_frac_chars_long_word_length_quality_signal : float64 | qsc_code_frac_lines_string_concat_quality_signal : float64 | qsc_code_cate_encoded_data_quality_signal : float64 | qsc_code_frac_chars_hex_words_quality_signal : float64 | qsc_code_frac_lines_prompt_comments_quality_signal : float64 | qsc_code_frac_lines_assert_quality_signal : float64
qsc_codepython_cate_ast_quality_signal : float64 | qsc_codepython_frac_lines_func_ratio_quality_signal : float64 | qsc_codepython_cate_var_zero_quality_signal : bool | qsc_codepython_frac_lines_pass_quality_signal : float64 | qsc_codepython_frac_lines_import_quality_signal : float64 | qsc_codepython_frac_lines_simplefunc_quality_signal : float64 | qsc_codepython_score_lines_no_logic_quality_signal : float64 | qsc_codepython_frac_lines_print_quality_signal : float64
qsc_code_num_words : int64 | qsc_code_num_chars : int64 | qsc_code_mean_word_length : int64 | qsc_code_frac_words_unique : null | qsc_code_frac_chars_top_2grams : int64 | qsc_code_frac_chars_top_3grams : int64 | qsc_code_frac_chars_top_4grams : int64 | qsc_code_frac_chars_dupe_5grams : int64 | qsc_code_frac_chars_dupe_6grams : int64 | qsc_code_frac_chars_dupe_7grams : int64 | qsc_code_frac_chars_dupe_8grams : int64 | qsc_code_frac_chars_dupe_9grams : int64 | qsc_code_frac_chars_dupe_10grams : int64 | qsc_code_frac_chars_replacement_symbols : int64 | qsc_code_frac_chars_digital : int64 | qsc_code_frac_chars_whitespace : int64 | qsc_code_size_file_byte : int64 | qsc_code_num_lines : int64 | qsc_code_num_chars_line_max : int64 | qsc_code_num_chars_line_mean : int64 | qsc_code_frac_chars_alphabet : int64 | qsc_code_frac_chars_comments : int64 | qsc_code_cate_xml_start : int64 | qsc_code_frac_lines_dupe_lines : int64 | qsc_code_cate_autogen : int64 | qsc_code_frac_lines_long_string : int64 | qsc_code_frac_chars_string_length : int64 | qsc_code_frac_chars_long_word_length : int64 | qsc_code_frac_lines_string_concat : null | qsc_code_cate_encoded_data : int64 | qsc_code_frac_chars_hex_words : int64 | qsc_code_frac_lines_prompt_comments : int64 | qsc_code_frac_lines_assert : int64 | qsc_codepython_cate_ast : int64 | qsc_codepython_frac_lines_func_ratio : int64 | qsc_codepython_cate_var_zero : int64 | qsc_codepython_frac_lines_pass : int64 | qsc_codepython_frac_lines_import : int64 | qsc_codepython_frac_lines_simplefunc : int64 | qsc_codepython_score_lines_no_logic : int64 | qsc_codepython_frac_lines_print : int64
effective : string | hits : int64
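One way to inspect rows in this layout is to export them to Parquet and load them with pandas. The snippet below is a minimal sketch under that assumption; the file name `code_rows.parquet` is a placeholder, not something named in the table above.

```python
# Minimal sketch: inspect rows that follow the schema listed above.
# Assumes the rows have been exported to a local Parquet file
# ("code_rows.parquet" is a hypothetical name, not from the source).
import pandas as pd

df = pd.read_parquet("code_rows.parquet")

# Per-file metadata plus a couple of the quality signals.
cols = [
    "hexsha", "size", "lang", "max_stars_repo_name",
    "avg_line_length", "alphanum_fraction",
    "qsc_code_frac_chars_comments_quality_signal",
]
print(df[cols].head())

# The 'content' column holds the raw source text of each file.
print(df.loc[0, "content"][:200])
```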

hexsha: 9a409844ea8ff87b62a343aba1bddbe1b4acc686 | size: 649 | ext: py | lang: Python
path: Toolkits/VCS/mygulamali__repo-mine/mine/helpers.py | repo: roscopecoltran/SniperKit-Core | head: 4600dffe1cddff438b948b6c22f586d052971e04 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null) | max_issues_count: null (issues events: null) | max_forks_count: null (forks events: null)
content:
from sys import stdout
def print_action(action):
    def print_action_decorator(function):
        def puts(string):
            stdout.write(string)
            stdout.flush()

        def function_wrapper(*args, **kwargs):
            puts("{0}... ".format(action))
            return_value = function(*args, **kwargs)
            puts("Done!\n")
            return return_value

        return function_wrapper
    return print_action_decorator


def format_plot_axes(axes):
    axes.xaxis.set_ticks_position('bottom')
    axes.yaxis.set_ticks_position('none')
    axes.spines['top'].set_color('none')
    axes.spines['right'].set_color('none')
| 27.041667
| 52
| 0.628659
| 77
| 649
| 5.077922
| 0.454545
| 0.084399
| 0.071611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002041
| 0.244992
| 649
| 23
| 53
| 28.217391
| 0.795918
| 0
| 0
| 0
| 0
| 0
| 0.061633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.277778
| false
| 0
| 0.055556
| 0
| 0.5
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: 9a4d61b4c436761ff6069be2e39ac836e18b0130 | size: 1,540 | ext: py | lang: Python
path: tests/regressions/python/942_lazy_fmap.py | repo: NanmiaoWu/phylanx | head: 295b5f82cc39925a0d53e77ba3b6d02a65204535 | licenses: ["BSL-1.0"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: 83 (stars events: 2017-08-27T15:09:13.000Z to 2022-01-18T17:03:41.000Z)
max_issues_count: 808 (issues events: 2017-08-27T15:35:01.000Z to 2021-12-14T17:30:50.000Z)
max_forks_count: 55 (forks events: 2017-08-27T15:09:22.000Z to 2022-03-25T12:07:34.000Z)
content:
# Copyright (c) 2019 Bita Hasheminezhad
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# #942: `fold_left`, `fold_right` and `fmap` do not work with a lazy function
import numpy as np
from phylanx import Phylanx, PhylanxSession, execution_tree
PhylanxSession.init(1)
def variable(value, dtype=None, name=None, constraint=None):
    if dtype is None:
        dtype = "float32"
    if constraint is not None:
        raise TypeError("Constraint is the projection function to be "
                        "applied to the variable after an optimizer update")
    if isinstance(value, execution_tree.variable):
        if dtype is not None:
            value.dtype = dtype
        if name is not None:
            value.name = name
        return value
    return execution_tree.variable(value, dtype=dtype, name=name)


def eval(func):
    return func.eval()


def fmap(fn, elems):
    pass  # make flake happy


@Phylanx
def map_fn_eager(fn, elems, dtype=None):
    return fmap(fn, elems)


def map_fn(fn, elems, dtype=None):
    return map_fn_eager.lazy(fn, elems, dtype)


@Phylanx
def sum_eager(x, axis=None, keepdims=False):
    return np.sum(x, axis, keepdims)


sum = Phylanx.lazy(sum_eager)


def test_map(x):
    return eval(map_fn(sum, variable(x)))


result = test_map(np.array([[1, 2, 3]]))
assert(np.all(result == [6])), result

result = test_map(np.array([1, 2, 3]))
assert(np.all(result == [1, 2, 3])), result
| 24.0625
| 79
| 0.670779
| 233
| 1,540
| 4.347639
| 0.39485
| 0.034551
| 0.026654
| 0.023692
| 0.122409
| 0.078973
| 0.078973
| 0.078973
| 0.078973
| 0.078973
| 0
| 0.021523
| 0.215584
| 1,540
| 63
| 80
| 24.444444
| 0.817053
| 0.180519
| 0
| 0.057143
| 0
| 0
| 0.079808
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 1
| 0.2
| false
| 0.028571
| 0.057143
| 0.142857
| 0.457143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|

hexsha: 9a5cc32eb8d423266537616c2fd2072b4114deb3 | size: 2,258 | ext: py | lang: Python
path: fabric_cm/credmgr/swagger_server/__main__.py | repo: fabric-testbed/CredentialManager | head: da8ce54ab78544ff907af81d8cd7723ff48f6652 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: 1 (stars events: 2021-05-24T17:20:07.000Z to 2021-05-24T17:20:07.000Z)
max_issues_count: 4 (issues events: 2021-06-07T16:18:45.000Z to 2021-06-29T20:13:21.000Z)
max_forks_count: null (forks events: null)
content:
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2020 FABRIC Testbed
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Author Komal Thareja ([email protected])
"""
Main Entry Point
"""
import os
import signal
import connexion
import prometheus_client
import waitress
from flask import jsonify
from fabric_cm.credmgr.swagger_server import encoder
from fabric_cm.credmgr.config import CONFIG_OBJ
from fabric_cm.credmgr.logging import LOG
def main():
    """
    Main Entry Point
    """
    log = LOG
    try:
        app = connexion.App(__name__, specification_dir='swagger/')
        app.app.json_encoder = encoder.JSONEncoder
        app.add_api('swagger.yaml',
                    arguments={'title': 'Fabric Credential Manager API'},
                    pythonic_params=True)
        port = CONFIG_OBJ.get_rest_port()
        # prometheus server
        prometheus_port = CONFIG_OBJ.get_prometheus_port()
        prometheus_client.start_http_server(prometheus_port)
        # Start up the server to expose the metrics.
        waitress.serve(app, port=port)
    except Exception as ex:
        log.error("Exception occurred while starting Flask app")
        log.error(ex)
        raise ex


if __name__ == '__main__':
    main()
| 32.724638
| 80
| 0.724978
| 312
| 2,258
| 5.141026
| 0.525641
| 0.054863
| 0.022444
| 0.035536
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003906
| 0.206377
| 2,258
| 68
| 81
| 33.205882
| 0.891183
| 0.542516
| 0
| 0
| 0
| 0
| 0.106599
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.333333
| 0
| 0.37037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|

hexsha: 9a67bbeeb8843ddedf058092d195c66fcbe342a3 | size: 1,881 | ext: py | lang: Python
path: waveguide/waveguide_test.py | repo: DentonGentry/gfiber-platform | head: 2ba5266103aad0b7b676555eebd3c2061ddb8333 | licenses: ["Apache-2.0"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: 8 (stars events: 2017-09-24T03:11:46.000Z to 2021-08-24T04:29:14.000Z)
max_issues_count: null (issues events: null)
max_forks_count: 1 (forks events: 2017-10-05T23:04:10.000Z to 2017-10-05T23:04:10.000Z)
content:
#!/usr/bin/python
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import waveguide
from wvtest import wvtest
class FakeOptDict(object):
    """A fake options.OptDict containing default values."""

    def __init__(self):
        self.status_dir = '/tmp/waveguide'


@wvtest.wvtest
def IwTimeoutTest():
    old_timeout = waveguide.IW_TIMEOUT_SECS
    waveguide.IW_TIMEOUT_SECS = 1
    old_path = os.environ['PATH']
    os.environ['PATH'] = 'fake:' + os.environ['PATH']
    waveguide.RunProc(lambda e, so, se: wvtest.WVPASSEQ(e, -9),
                      ['iw', 'sleepn', str(waveguide.IW_TIMEOUT_SECS + 1)])
    os.environ['PATH'] = old_path
    waveguide.IW_TIMEOUT_SECS = old_timeout


@wvtest.wvtest
def ParseDevListTest():
    waveguide.opt = FakeOptDict()
    old_path = os.environ['PATH']
    os.environ['PATH'] = 'fake:' + os.environ['PATH']
    managers = []
    waveguide.CreateManagers(managers, False, False, None)
    got_manager_summary = set((m.phyname, m.vdevname, m.primary)
                              for m in managers)
    want_manager_summary = set((
        ('phy1', 'wlan1', True),
        ('phy1', 'wlan1_portal', False),
        ('phy0', 'wlan0', True),
        ('phy0', 'wlan0_portal', False)))
    wvtest.WVPASSEQ(got_manager_summary, want_manager_summary)
    os.environ['PATH'] = old_path


if __name__ == '__main__':
    wvtest.wvtest_main()
| 28.938462
| 74
| 0.696438
| 255
| 1,881
| 4.988235
| 0.509804
| 0.056604
| 0.081761
| 0.069182
| 0.146226
| 0.078616
| 0.078616
| 0.078616
| 0.078616
| 0.078616
| 0
| 0.012314
| 0.179692
| 1,881
| 64
| 75
| 29.390625
| 0.812054
| 0.335991
| 0
| 0.235294
| 0
| 0
| 0.099187
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0.058824
| 0.088235
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: 7be972ac4586def48187bfcf50e95c9e16542c4d | size: 361 | ext: py | lang: Python
path: Python Advanced Retake Exam - 16 Dec 2020/Problem 3- Magic triangle - Pascal.py | repo: DiyanKalaydzhiev23/Advanced---Python | head: ed2c60bb887c49e5a87624719633e2b8432f6f6b | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null) | max_issues_count: null (issues events: null) | max_forks_count: null (forks events: null)
content:
def get_magic_triangle(n):
    triangle = [[1], [1, 1]]
    for _ in range(2, n):
        row = [1]
        last_row = triangle[-1]
        for i in range(1, len(last_row)):
            num = last_row[i-1] + last_row[i]
            row.append(num)
        row.append(1)
        triangle.append(row)
    return triangle


get_magic_triangle(5)
| 21.235294
| 46
| 0.509695
| 51
| 361
| 3.431373
| 0.352941
| 0.16
| 0.182857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04329
| 0.360111
| 361
| 16
| 47
| 22.5625
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: 7beab3658ca8052cfa8c2cfea3b8cd3bd3c9a157 | size: 262 | ext: py | lang: Python
path: py4mc/__init__.py | repo: capslock321/py4mc | head: aad43d33f2ab1d264f0b86a84c80823309677994 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null) | max_issues_count: null (issues events: null) | max_forks_count: null (forks events: null)
content:
from .api import MojangApi
from .dispatcher import Dispatch
from .exceptions import (
    ApiException,
    ResourceNotFound,
    InternalServerException,
    UserNotFound,
)
__version__ = "0.0.1a"
__license__ = "MIT"
__author__ = "capslock321"
| 17.466667
| 33
| 0.698473
| 23
| 262
| 7.434783
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029557
| 0.225191
| 262
| 14
| 34
| 18.714286
| 0.812808
| 0
| 0
| 0
| 0
| 0
| 0.080645
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.272727
| 0
| 0.272727
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: 7bf5036dc7b11f3015385fa7ebed58f2c40e9c71 | size: 262 | ext: py | lang: Python
path: src/cs2mako/patterns.py | repo: eventbrite/cs2mako | head: 163affcc764a574b4af543c3520b7f345992973a | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null)
max_issues_count: null (issues events: null)
max_forks_count: 2 (forks events: 2015-04-03T05:35:36.000Z to 2021-09-08T11:48:27.000Z)
content:
# Copyright (c) 2014 Eventbrite, Inc. All rights reserved.
# See "LICENSE" file for license.
import re
open_r_str = r'\<\?cs\s*([a-zA-Z]+)([:]|\s)'
close_r_str = r'\<\?cs\s*/([a-zA-Z]+)\s*\?\>'
open_r = re.compile(open_r_str)
close_r = re.compile(close_r_str)
| 26.2
| 58
| 0.637405
| 49
| 262
| 3.204082
| 0.489796
| 0.101911
| 0.101911
| 0.089172
| 0.165605
| 0.165605
| 0.165605
| 0.165605
| 0.165605
| 0
| 0
| 0.017316
| 0.118321
| 262
| 9
| 59
| 29.111111
| 0.662338
| 0.335878
| 0
| 0
| 0
| 0
| 0.327485
| 0.327485
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: 7bfb8c398b66afff9f9537190851684dffe009d8 | size: 189 | ext: py | lang: Python
path: basics.py | repo: c25l/longmont_data_science_tensorflow | head: 78302ab5b76a1e4632deda164615b4861c21f534 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null) | max_issues_count: null (issues events: null) | max_forks_count: null (forks events: null)
content:
#!/usr/bin/env python3
import tensorflow as tf
x=tf.Variable(0.5)
y = x*x
sess = tf.Session()
sess.run(tf.global_variables_initializer())
print("x =",sess.run(x))
print("y =",sess.run(y))
| 18.9
| 43
| 0.687831
| 35
| 189
| 3.657143
| 0.571429
| 0.164063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017647
| 0.100529
| 189
| 9
| 44
| 21
| 0.735294
| 0.111111
| 0
| 0
| 0
| 0
| 0.035928
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.285714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d0056587271ff8ce0d2628ab99ab1c7bc8e2f7e9 | size: 558 | ext: py | lang: Python
path: data/Carp.py | repo: shebang-sh/npb-ouenka-bot | head: 6fc6f7c1717632c3845496c309560233a9c73d8e | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null)
max_issues_count: 14 (issues events: 2022-03-29T09:07:31.000Z to 2022-03-30T02:37:07.000Z)
max_forks_count: null (forks events: null)
content:
data={
"田中広輔":"赤く燃え上がる 夢見たこの世界で 研ぎ澄ませそのセンス 打てよ広輔",
"長野久義":"歓声を背に受け 頂をみつめて 紅一筋に 突き進め長野",
"安部友裕":"新しい時代に 今手を伸ばせ 終わらぬ夢の先に 導いてくれ",
"堂林翔太":"光り輝く その道を 翔けぬけて魅せろ 堂林SHOW TIME!",
"會澤翼":"いざ大空へ翔ばたけ 熱い想い乗せ 勝利へ導く一打 決めろよ翼",
"菊池涼介":"【前奏:始まりの鐘が鳴る 広島伝説】\n光を追い越して メーター振りきり駆け抜けろ 止まらないぜ 韋駄天菊池",
"野間峻祥":"鋭い打球飛ばせ 自慢の俊足魅せろ 赤い流星のように 走れ峻祥",
"磯村嘉孝":"【前奏:当たると痛えぞ! 磯村パンチ】\n解き放てよこの瞬間 エンジン全開さあスパーク いざ満タンフルパワーで 撃て磯村パンチ",
"小園海斗":"新たな息吹を 注ぎ込め 世代のトップを走れ ぶっちぎれ",
"松山竜平":"闘志を燃やし 鋭く振り抜け さあかっとばせ さあ打ちまくれ 我等の松山",
"西川龍馬":"気高きその勇姿 比ぶ者は無いさ 我等に希望の光 見せてくれ",
}
| 42.923077
| 77
| 0.691756
| 64
| 558
| 6.03125
| 0.984375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 558
| 13
| 78
| 42.923077
| 0.830108
| 0
| 0
| 0
| 0
| 0
| 0.767442
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d0159e9e7bbcc76b698e4bc18244e26a32e8736f | size: 1,068 | ext: py | lang: Python
path: rllib/environment/mdps/__init__.py | repo: shenao-zhang/DCPU | head: 0da9aa2b7878b54ba4ee4dca894c2e86cdc0d559 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: 8 (stars events: 2020-10-23T07:52:19.000Z to 2022-03-06T13:35:12.000Z)
max_issues_count: 3 (issues events: 2021-03-04T13:44:01.000Z to 2021-03-23T09:57:50.000Z)
max_forks_count: 3 (forks events: 2021-03-18T08:23:56.000Z to 2021-07-06T11:20:12.000Z)
content:
"""Common MDPs in RL literature."""
from gym.envs.registration import register
from .baird_star import BairdStar
from .boyan_chain import BoyanChain
from .double_chain import DoubleChainProblem
from .grid_world import EasyGridWorld
from .random_mdp import RandomMDP
from .single_chain import SingleChainProblem
from .two_state import TwoStateProblem
register(id="BairdStar-v0", entry_point="rllib.environment.mdps.baird_star:BairdStar")
register(
id="BoyanChain-v0", entry_point="rllib.environment.mdps.boyan_chain:BoyanChain"
)
register(
id="DoubleChainProblem-v0",
entry_point="rllib.environment.mdps.double_chain:DoubleChainProblem",
)
register(
id="EasyGridWorld-v0", entry_point="rllib.environment.mdps.grid_world:EasyGridWorld"
)
register(id="RandomMDP-v0", entry_point="rllib.environment.mdps.random_mdp:RandomMDP")
register(
id="SingleChainProblem-v0",
entry_point="rllib.environment.mdps.single_chain:SingleChainProblem",
)
register(
id="TwoStateProblem-v0",
entry_point="rllib.environment.mdps.two_state:TwoStateProblem",
)
| 33.375
| 88
| 0.801498
| 130
| 1,068
| 6.423077
| 0.276923
| 0.083832
| 0.100599
| 0.142515
| 0.268263
| 0.268263
| 0
| 0
| 0
| 0
| 0
| 0.007194
| 0.088951
| 1,068
| 31
| 89
| 34.451613
| 0.850976
| 0.027154
| 0
| 0.178571
| 0
| 0
| 0.43272
| 0.363988
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d01da04f511cea7e2cb3c255526d51bbef8b8016 | size: 724 | ext: py | lang: Python
path: models/todo.py | head: 7f8d33086ca307ae7f1b998ed7d52e27fc625388 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars repo: chidaobanjiu/Flask_Web | max_stars_count: 1 (stars events: 2017-02-01T07:13:37.000Z to 2017-02-01T07:13:37.000Z)
max_issues repo: chidaobanjiu/mana2077 | max_issues_count: null (issues events: null)
max_forks repo: chidaobanjiu/mana2077 | max_forks_count: null (forks events: null)
content:
from models import Mongua
class Todo(Mongua):
    __field__ = Mongua.__fields__ + [
        ('title', str, ''),
        ('completed', bool, False),
    ]

    @classmethod
    def update(cls, id, form):
        t = cls.find(id)
        valid_names = [
            'title',
            'completed'
        ]
        for key in form:
            # 这里只应该更新我们想要更新的东西
            if key in valid_names:
                setattr(t, key, form[key])
        t.save()
        return t

    @classmethod
    def complete(cls, id, completed=True):
        """
        用法很方便
        Todo.complete(1)
        Todo.complete(2, False)
        """
        t = cls.find(id)
        t.completed = completed
        t.save()
        return t
| 20.685714
| 42
| 0.476519
| 74
| 724
| 4.527027
| 0.486486
| 0.083582
| 0.047761
| 0.059701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004673
| 0.40884
| 724
| 34
| 43
| 21.294118
| 0.778037
| 0.088398
| 0
| 0.333333
| 0
| 0
| 0.045161
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.291667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d02b8e08d67537a0237e5ce61fdce30861d1d5de | size: 2,534 | ext: py | lang: Python
path: JTL/__init__.py | repo: AgalmicVentures/JTL | head: 967bc670bf696e0214a69bad619cf0148fec2fe6 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: 3 (stars events: 2017-12-06T04:35:24.000Z to 2020-01-29T14:29:57.000Z)
max_issues_count: null (issues events: null)
max_forks_count: 2 (forks events: 2018-01-06T13:18:07.000Z to 2019-09-01T01:24:04.000Z)
content:
#!/usr/bin/env python3
# Copyright (c) 2015-2021 Agalmic Ventures LLC (www.agalmicventures.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import json
import sys
def main():
    """
    Runs the main JTL program.

    :return: int
    """
    #Parse arguments
    parser = argparse.ArgumentParser(description='JSON Transformation Language')
    parser.add_argument('-i', '--indent', default=4, type=int, help='Indentation amount.')
    parser.add_argument('-t', '--transform-file', help='The name of the JSON file containing the transformation to run.')
    parser.add_argument('transform', nargs='?', help='The transformation to run.')
    arguments = parser.parse_args(sys.argv[1:])

    #Load the transformation
    if arguments.transform is None and arguments.transform_file is not None:
        #From a file
        with open(arguments.transform_file, 'r') as f:
            transformStr = f.read()
    elif arguments.transform is not None and arguments.transform_file is None:
        #From the command line
        transformStr = arguments.transform
    else:
        print('ERROR: Specify either a transform file or a transform')
        return 1
    transformData = json.loads(transformStr)

    #Read the JSON in from stdin
    #TODO: error handling
    data = json.loads(sys.stdin.read())

    #Transform the JSON
    #TODO: cleaner way to do this
    sys.path.append('.')
    import Interpreter
    result = Interpreter.transformJson(data, transformData)

    #Output the result
    print(json.dumps(result, indent=arguments.indent, sort_keys=True))

    return 0


if __name__ == '__main__':
    sys.exit(main())
| 35.194444
| 118
| 0.757695
| 368
| 2,534
| 5.173913
| 0.483696
| 0.046218
| 0.026786
| 0.023109
| 0.032563
| 0.032563
| 0
| 0
| 0
| 0
| 0
| 0.006066
| 0.154302
| 2,534
| 71
| 119
| 35.690141
| 0.882408
| 0.52644
| 0
| 0
| 0
| 0
| 0.204134
| 0
| 0
| 0
| 0
| 0.014085
| 0
| 1
| 0.038462
| false
| 0
| 0.153846
| 0
| 0.269231
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d02ea29aa2c01102b027e646e76a227470fdeefe | size: 200 | ext: py | lang: Python
path: applications/physbam/physbam-lib/External_Libraries/Archives/boost/tools/build/v2/test/test1.py | repo: schinmayee/nimbus | head: 170cd15e24a7a88243a6ea80aabadc0fc0e6e177 | licenses: ["BSD-3-Clause"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: 20 (stars events: 2017-07-03T19:09:09.000Z to 2021-09-10T02:53:56.000Z)
max_issues_count: null (issues events: null)
max_forks_count: 9 (forks events: 2017-09-17T02:05:06.000Z to 2020-01-31T00:12:01.000Z)
content:
#!/usr/bin/python
import BoostBuild
t = BoostBuild.Tester()
t.write("test.jam","""
actions unbuilt { } unbuilt all ;
ECHO "Hi" ;
""")
t.run_build_system("-ftest.jam", stdout="Hi\n")
t.pass_test()
| 14.285714
| 47
| 0.66
| 30
| 200
| 4.3
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 200
| 13
| 48
| 15.384615
| 0.737143
| 0.08
| 0
| 0
| 0
| 0
| 0.377049
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d04dee56b2ed832a66c6149983ca467bfbbbbafc | size: 404 | ext: py | lang: Python
path: [5]-RailFence-Cipher.py | repo: anuj0809/Fundamentals-Of-Cryptography | head: e6f88dcbd5f63f6938ddf5825bf9395d5ede9fe1 | licenses: ["Apache-2.0"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null) | max_issues_count: null (issues events: null) | max_forks_count: null (forks events: null)
content:
def threeRailEncrypt(plainText):
    plainText = plainText.lower()
    cipherText = ""
    rail1 = ""
    rail2 = ""
    rail3 = ""
    for i in range(len(plainText)):
        if i%3 == 0:
            rail1 += plainText[i]
        elif i%3 == 1:
            rail2 += plainText[i]
        else:
            rail3 += plainText[i]
    cipherText = rail1 + rail2 + rail3
    return cipherText


print(threeRailEncrypt("Vineet"))
| 19.238095
| 37
| 0.564356
| 43
| 404
| 5.302326
| 0.511628
| 0.131579
| 0.175439
| 0.219298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046099
| 0.30198
| 404
| 20
| 38
| 20.2
| 0.762411
| 0
| 0
| 0
| 0
| 0
| 0.014851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0
| 0
| 0.125
| 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d04fcbb17bf03785d02041e016915bdc95d6404c | size: 4,514 | ext: py | lang: Python
path: tiddlyweb/web/validator.py | repo: tiddlyweb/tiddlyweb | head: 376bcad280e24d2de4d74883dc4d8369abcb2c28 | licenses: ["BSD-3-Clause"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: 57 (stars events: 2015-02-01T21:03:34.000Z to 2021-12-25T12:02:31.000Z)
max_issues_count: 6 (issues events: 2016-02-05T11:43:32.000Z to 2019-09-05T13:38:49.000Z)
max_forks_count: 17 (forks events: 2015-05-12T08:53:23.000Z to 2021-12-21T15:56:30.000Z)
content:
"""
A collection of routines for validating, santizing and otherwise messing
with content coming in from the web to be :py:class:`tiddlers
<tiddlyweb.model.tiddler.Tidder>`, :py:class:`bags
<tiddlyweb.model.bag.Bag>` or :py:class:`recipes
<tiddlyweb.model.recipe.Recipe>`.
The validators can be extended by adding functions to the ``BAG_VALIDATORS``,
``RECIPE_VALIDATORS`` and ``TIDDLER_VALIDATORS``. The functions take an
entity object, and an optional WSGI ``environ`` dict.
"""
class InvalidTiddlerError(Exception):
"""
The provided :py:class:`tiddler <tiddlyweb.model.tiddler.Tiddler>`
has not passed a validation routine and has been rejected.
The caller should stop processing and return an error to calling
code or user-agent.
"""
pass
class InvalidBagError(Exception):
"""
The provided :py:class:`bag <tiddlyweb.model.bag.Bag>` has not passed
a validation routine and has been rejected. The caller should stop
processing and return an error to calling code or user-agent.
"""
pass
class InvalidRecipeError(Exception):
"""
The provided :py:class:`recipe <tiddlyweb.model.recipe.Recipe>` has
not passed a validation routine and has been rejected. The caller
should stop processing and return an error to calling code or
user-agent.
"""
pass
def sanitize_desc(entity, environ):
"""
Strip any dangerous HTML which may be present in a :py:class:`bag
<tiddlyweb.model.bag.Bag>` or :py:class:`recipe
<tiddlyweb.model.recipe.Recipe>` description.
"""
desc = entity.desc
entity.desc = sanitize_html_fragment(desc)
BAG_VALIDATORS = [
sanitize_desc,
]
TIDDLER_VALIDATORS = []
RECIPE_VALIDATORS = [
sanitize_desc,
]
def validate_tiddler(tiddler, environ=None):
"""
Pass the :py:class:`tiddler <tiddlyweb.model.tiddler.Tiddler>`
to each of the functions in ``TIDDLER_VALIDATORS``, in order,
either changing the content of the tiddler's attributes, or if
some aspect of the tiddler can not be accepted raising
:py:class:`InvalidTiddlerError`.
``TIDDLER_VALIDATORS`` is an empty list which may be extended
by plugins.
``validate_tiddler`` is called from :py:mod:`web handlers
<tiddlyweb.web.handler>`, when the ``accept`` constraint on
the :py:class:`policy <tiddlyweb.model.policy.Policy>` of the
:py:class:`bag <tiddlyweb.model.bag.Bag>` containing the
tiddler does not pass.
"""
_validate(tiddler, environ, TIDDLER_VALIDATORS)
def validate_bag(bag, environ=None):
"""
Pass the :py:class:`bag <tiddlyweb.model.bag.Bag>` to each of
the functions in ``BAG_VALIDATORS``, in order, either changing
the content of the bags's attributes, or if some aspect of the
bag can not be accepted raising :py:class:`InvalidBagError`.
``BAG_VALIDATORS`` may be extended by plugins.
``validate_bag`` is called whenever a bag is ``PUT`` via HTTP.
"""
_validate(bag, environ, BAG_VALIDATORS)
def validate_recipe(recipe, environ=None):
"""
Pass the :py:class:`recipe <tiddlyweb.model.recipe.Recipe>` to
each of the functions in ``RECIPE_VALIDATORS``, in order, either
changing the content of the recipes's attributes, or if some aspect
of the recipe can not be accepted raising :py:class:`InvalidRecipeError`.
``RECIPE_VALIDATORS`` may be extended by plugins.
``validate_recipe`` is called whenever a recipe is ``PUT`` via HTTP.
"""
_validate(recipe, environ, RECIPE_VALIDATORS)
def _validate(entity, environ, validators):
"""
Validate the provided entity against the list of functions
in validators.
"""
if environ is None:
environ = {}
for validator in validators:
validator(entity, environ)
def sanitize_html_fragment(fragment):
"""
Santize an HTML ``fragment``, returning a copy of the fragment
that has been cleaned up.
"""
if fragment:
import html5lib
from html5lib.sanitizer import HTMLSanitizer
from html5lib.serializer.htmlserializer import HTMLSerializer
parser = html5lib.HTMLParser(tokenizer=HTMLSanitizer)
parsed = parser.parseFragment(fragment)
walker = html5lib.treewalkers.getTreeWalker('etree')
stream = walker(parsed)
serializer = HTMLSerializer(quote_attr_values=True,
omit_optional_tags=False)
output = serializer.render(stream)
return output
else:
return fragment
| 31.347222
| 77
| 0.699158
| 583
| 4,514
| 5.35163
| 0.250429
| 0.035897
| 0.027244
| 0.032051
| 0.441026
| 0.408974
| 0.361859
| 0.213782
| 0.163782
| 0.119551
| 0
| 0.001394
| 0.205361
| 4,514
| 143
| 78
| 31.566434
| 0.868414
| 0.615419
| 0
| 0.119048
| 0
| 0
| 0.003422
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.071429
| 0.071429
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d053ccfb39ce30bd9ced8dc52651dfaad639314a | size: 11,426 | ext: py | lang: Python
path: app/routes.py | repo: mrtoronto/FAIPD | head: 8cb4df2577af515238ce6ee12e627b830bec67a6 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null) | max_issues_count: null (issues events: null) | max_forks_count: null (forks events: null)
content:
from datetime import datetime
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_user, logout_user, current_user, login_required
from werkzeug.urls import url_parse
from app import app, db
from app.forms import LoginForm, RegistrationForm, EditProfileForm, PostForm, \
ResetPasswordRequestForm, ResetPasswordForm, EditPostForm
from app.models import User, Post
from app.email import send_password_reset_email
@app.before_request
def before_request():
if current_user.is_authenticated:
current_user.last_seen = datetime.utcnow()
db.session.commit()
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
def index():
return render_template('home.html')
@app.route('/posts/<post_target_type>', methods=['GET', 'POST'])
def feed(post_target_type):
page = request.args.get('page', 1, type=int)
if post_target_type in ['school', 'student', 'company', 'pair']:
posts = Post.query.filter_by(post_target_type=post_target_type).order_by(Post.timestamp.desc())\
.paginate(page, app.config['POSTS_PER_PAGE'], False)
if post_target_type == 'school':
page_header = 'Opportunities for Universities'
elif post_target_type == 'student':
page_header = 'Opportunities for Students'
elif post_target_type == 'company':
page_header = 'Opportunities for Companies'
elif post_target_type == 'pair':
page_header = 'Opportunities for Paired University-Companies'
elif post_target_type == 'feed':
if not current_user.is_authenticated:
return(redirect(url_for('feed', post_target_type = 'explore')))
posts = current_user.followed_posts().order_by(Post.timestamp.desc()).paginate(
page, app.config['POSTS_PER_PAGE'], False)
page_header = 'Followed Opportunities'
elif post_target_type == 'explore':
posts = Post.query.order_by(Post.timestamp.desc()).paginate(
page, app.config['POSTS_PER_PAGE'], False)
page_header = 'All Opportunities'
next_url = url_for('feed', page=posts.next_num, post_target_type = post_target_type) \
if posts.has_next else None
prev_url = url_for('feed', page=posts.prev_num, post_target_type = post_target_type) \
if posts.has_prev else None
return render_template('index.html',
post_target_type = post_target_type,
page_header=page_header,
posts=posts.items,
next_url=next_url,
prev_url=prev_url)
@app.route('/posts/create', methods=['GET', 'POST'])
@login_required
def make_a_post():
form = PostForm()
if current_user.user_type == 'student':
form.post_target_type.data = 'pair'
elif current_user.user_type == 'school':
form.post_target_type.data = 'company'
elif current_user.user_type == 'company':
form.post_target_type.data = 'school'
elif current_user.user_type == 'pair':
form.post_target_type.data = 'student'
else:
form.post_target_type.data = 'student'
if form.validate_on_submit() and current_user.is_authenticated:
post = Post(post_title = form.post_title.data,
body=form.body.data,
author=current_user,
post_origin_type = current_user.user_type,
post_target_type = form.post_target_type.data)
if (form.post_target_type.data == 'student' and current_user.user_type != 'pair') or \
(form.post_target_type.data == 'school' and current_user.user_type != 'company') or \
(form.post_target_type.data == 'company' and current_user.user_type != 'school') or \
(form.post_target_type.data == 'pair' and current_user.user_type != 'student'):
flash("Are you sure you set your user type correctly?")
return redirect(url_for('edit_profile'))
db.session.add(post)
db.session.commit()
flash('Your post is now live!')
return redirect(url_for('feed', post_target_type = 'explore'))
return render_template('make_a_post.html', form=form)
@app.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first()
if user is None or not user.check_password(form.password.data):
flash('Invalid username or password')
return redirect(url_for('login'))
login_user(user, remember=form.remember_me.data)
next_page = request.args.get('next')
if not next_page or url_parse(next_page).netloc != '':
next_page = url_for('index')
return redirect(next_page)
return render_template('login.html', title='Sign In', form=form)
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
@app.route('/register', methods=['GET', 'POST'])
def register():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = RegistrationForm()
if form.validate_on_submit():
user = User(username=form.username.data,
email=form.email.data,
user_type = form.user_type.data,
display_name=form.display_name.data,
affiliation=form.affiliation.data)
user.set_password(form.password.data)
db.session.add(user)
db.session.commit()
flash('Congratulations, you are now a registered user!')
return redirect(url_for('login'))
return render_template('register.html', title='Register', form=form)
@app.route('/reset_password_request', methods=['GET', 'POST'])
def reset_password_request():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = ResetPasswordRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
send_password_reset_email(user)
flash('Check your email for the instructions to reset your password')
return redirect(url_for('login'))
return render_template('reset_password_request.html',
title='Reset Password', form=form)
@app.route('/reset_password/<token>', methods=['GET', 'POST'])
def reset_password(token):
if current_user.is_authenticated:
return redirect(url_for('index'))
user = User.verify_reset_password_token(token)
if not user:
return redirect(url_for('index'))
form = ResetPasswordForm()
if form.validate_on_submit():
user.set_password(form.password.data)
db.session.commit()
flash('Your password has been reset.')
return redirect(url_for('login'))
return render_template('reset_password.html', form=form)
@app.route('/user/<username>')
@login_required
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
posts = user.posts.order_by(Post.timestamp.desc()).paginate(
page, app.config['POSTS_PER_PAGE'], False)
next_url = url_for('user', username=user.username, page=posts.next_num) \
if posts.has_next else None
prev_url = url_for('user', username=user.username, page=posts.prev_num) \
if posts.has_prev else None
return render_template('user.html', user=user, posts=posts.items,
next_url=next_url, prev_url=prev_url)
@app.route('/post/<id>')
def post(id):
#page = request.args.get('page', 1, type=int)
post = Post.query.filter_by(id=id).first_or_404()
author = User.query.filter_by(id=post.user_id).first_or_404()
return render_template('post.html', post=post, author=author)
@app.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm(current_user.username)
if form.validate_on_submit():
if form.username.data:
current_user.username = form.username.data
if form.about_me.data:
current_user.about_me = form.about_me.data
if form.display_name.data:
current_user.display_name = form.display_name.data
if form.affiliation.data:
current_user.affiliation = form.affiliation.data
if form.user_type.data:
current_user.user_type = form.user_type.data
db.session.commit()
flash('Your changes have been saved.')
return redirect(url_for('edit_profile'))
elif request.method == 'GET':
form.username.data = current_user.username
form.about_me.data = current_user.about_me
form.affiliation.data = current_user.affiliation
form.display_name.data = current_user.display_name
form.user_type.data = current_user.user_type
return render_template('edit_profile.html', title='Edit Profile',
form=form)
@app.route('/post/<post_id>/edit', methods=['GET', 'POST'])
@login_required
def edit_post(post_id):
post = Post.query.filter_by(id=post_id).first_or_404()
form = EditPostForm()
form.body.default = post.body
form.post_target_type.default = post.post_target_type
form.post_title.default = post.post_title
if form.validate_on_submit():
post.post_title = form.post_title.data
post.body = form.body.data
post.post_target_type = form.post_target_type.data
db.session.commit()
flash('Your changes have been saved.')
return redirect(url_for('edit_post', post_id=post_id))
elif request.method == 'GET':
form.post_target_type.data = post.post_target_type
form.post_title.data = post.post_title
form.body.data = post.body
return render_template('edit_post.html', title='Edit Profile',
form=form)
@app.route('/follow/<username>')
@login_required
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('User {} not found.'.format(username))
return redirect(url_for('index'))
if user == current_user:
flash('You cannot follow yourself!')
return redirect(url_for('user', username=username))
current_user.follow(user)
db.session.commit()
flash('You are following {}!'.format(username))
return redirect(url_for('user', username=username))
@app.route('/unfollow/<username>')
@login_required
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('User {} not found.'.format(username))
return redirect(url_for('index'))
if user == current_user:
flash('You cannot unfollow yourself!')
return redirect(url_for('user', username=username))
current_user.unfollow(user)
db.session.commit()
flash('You are not following {}.'.format(username))
return redirect(url_for('user', username=username))
| 40.953405
| 105
| 0.647033
| 1,443
| 11,426
| 4.898129
| 0.106722
| 0.050934
| 0.071307
| 0.059423
| 0.632711
| 0.542586
| 0.422467
| 0.359083
| 0.296406
| 0.232315
| 0
| 0.001712
| 0.233152
| 11,426
| 278
| 106
| 41.100719
| 0.804953
| 0.003851
| 0
| 0.291667
| 0
| 0
| 0.132307
| 0.008826
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0.079167
| 0.033333
| 0.004167
| 0.229167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|

hexsha: d0688c7557a32c0ad0f636ac14f06a163a2f4570 | size: 36,514 | ext: py | lang: Python
path: BookDatabaseUtility.py | repo: BitWorks/xbrlstudio | head: 231beb46c56c8086f9fcc8846955667d947709c2 | licenses: ["MIT"] (same for the max_stars, max_issues and max_forks column groups)
max_stars_count: null (stars events: null) | max_issues_count: null (issues events: null) | max_forks_count: null (forks events: null)
content:
"""
:mod: 'BookDatabaseUtility'
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. py:module:: BookDatabaseUtility
:copyright: Copyright BitWorks LLC, All rights reserved.
:license: MIT
:synopsis: SQLAlchemy ORM engine, metadata, and utility functions for working with dynamic sqlite databases
:description: Contains the following functions:
makeEntityTable - creates table 'entities' - columns = entity_cik, parent_cik, entity_name
makeFilingsTable - creates table 'filings####' - columns = entity_cik, q1, q2, q3, q4
getAllTables - returns a list of all SQLAlchemy Table objects
tableExists - determines whether a given table name exists in the database
getEntityTreeInfo - returns list of tuples, where each tuple is a row [(entity_cik, parent_cik, entity_name)]
getNameFromCik - uses a given cik to get an entity_name from the database
updateEntityParent - updates the parent cik of a given child cik; used when user alters entity tree view hierarchy
getEntityDict - returns a dict of the format {entity_name:entity_cik}, for all entities in database
getFilingTreeInfo - returns list of strings, where each string corresponds to a filing available for viewing
selectFromDatabase - given a cik and filing period, selects a Filing object from the database
existsInDatabase - determines whether a given filing exists in the database
manualExistsInDatabase - determines whether a given filing exists in the database, with input from user
addToEntitiesTable - updates 'entities' table to include a given entity, if not present
addToFilingsTable - updates a 'filings####' table to include a given filing, if not present
addToDatabase - adds a given fact file to the database in the form of a pickled Filing object
manualAddToDatabase - adds a given fact file to the database in the form of a pickled Filing object, with input from user
countEntityAndChildren - determines the breadth and depth of an entity tree in the database, used for status bar updates
removeEntityFromDatabase - removes a given entity (and all its children) from the database; currently an expensive function
removeFilingFromDatabase - removes a given filing item (and all its children) from the database; currently also expensive
updateEntityName - updates the name of an entity to that disclosed in the latest available filing
getLastFiling - returns the latest filing for a particular entity
renameEntityInDatabase(target_cik, new_entity_name) - manual replacement of the entity name with new_entity_name in the database
"""
try:
import pickle, sys, os, datetime, logging
database_utility_logger = logging.getLogger()
from sqlalchemy import (create_engine, Table, Column, Integer, String, PickleType)
from sqlalchemy.schema import MetaData
from sqlalchemy.pool import NullPool
# Tiered
# from . import (BookFilingUtility)
# Flat
import BookFilingUtility
except Exception as err:
database_utility_logger.error("{0}:BookDatabaseUtility import error:{1}".format(str(datetime.datetime.now()), str(err)))
def buildEngine(db_uri):
try:
global Engine
global Global_metadata
Engine = create_engine(os.path.join("sqlite:///{0}".format(db_uri)), poolclass = NullPool, echo = False)
Global_metadata = MetaData(bind = Engine, reflect = True)
except Exception as err:
database_utility_logger.error("{0}:buildEngine():{1}".format(str(datetime.datetime.now()), str(err)))
def makeEntityTable():
try:
global Global_metadata
ent = Table(
"entities",
Global_metadata,
Column("entity_cik", Integer, primary_key = True),
Column("parent_cik", Integer, nullable = True),
Column("entity_name", String(60))
)
Global_metadata.create_all(Engine)
except Exception as err:
database_utility_logger.error("{0}:makeEntityTable():{1}".format(str(datetime.datetime.now()), str(err)))
return
def makeFilingsTable(target_name):
try:
global Global_metadata
fil = Table(
target_name,
Global_metadata,
Column("entity_cik", Integer, primary_key = True),
Column("q1", PickleType, nullable = True),
Column("q2", PickleType, nullable = True),
Column("q3", PickleType, nullable = True),
Column("q4", PickleType, nullable = True)
)
Global_metadata.create_all(Engine)
except Exception as err:
database_utility_logger.error("{0}:makeFilingsTable():{1}".format(str(datetime.datetime.now()), str(err)))
return
def getAllTables():
try:
local_metadata = MetaData(bind = Engine, reflect = True)
tables = []
for table in local_metadata.sorted_tables:
tables.append(table)
return tables
except Exception as err:
database_utility_logger.error("{0}:getAllTables():{1}".format(str(datetime.datetime.now()), str(err)))
def tableExists(target_table_name):
try:
tables = getAllTables()
for table in tables:
if table.name == target_table_name:
return True
return False
except Exception as err:
database_utility_logger.error("{0}:tableExists():{1}".format(str(datetime.datetime.now()), str(err)))
def getEntityTreeInfo():
try:
connection = Engine.connect()
table_select = []
entity_list = []
tables = getAllTables()
for table in tables:
if table.name == "entities":
try:
select_stmt = table.select()
table_select = connection.execute(select_stmt).fetchall()
except Exception as err:
pass
for entry in table_select:
entity_list.append(entry)
connection.close()
return entity_list
except Exception as err:
database_utility_logger.error("{0}:getEntityTreeInfo():{1}".format(str(datetime.datetime.now()), str(err)))
def getNameFromCik(target_cik):
try:
connection = Engine.connect()
entity_name = None
tables = getAllTables()
for table in tables:
if table.name == "entities":
try:
select_stmt = table.select().where(table.columns.entity_cik == target_cik)
table_select = connection.execute(select_stmt).fetchall()
entity_name = table_select[0][2]
except Exception as err:
pass
connection.close()
return entity_name
except Exception as err:
database_utility_logger.error("{0}:getNameFromCik():{1}".format(str(datetime.datetime.now()), str(err)))
def updateEntityParent(target_child_cik, target_parent_cik):
try:
target_child_cik = int(target_child_cik)
except Exception as err:
pass
try:
target_parent_cik = int(target_parent_cik)
except Exception as err:
pass
try:
connection = Engine.connect()
tables = getAllTables()
for table in tables:
if table.name == "entities":
try:
update_stmt = table.update().where(table.columns.entity_cik == target_child_cik).values(parent_cik = target_parent_cik)
table_update = connection.execute(update_stmt)
except Exception as err:
pass
connection.close()
try:
if table_update.last_updated_params() is not None:
return_val = True
else:
return_val = False
except Exception as err:
return_val = False
return return_val
except Exception as err:
database_utility_logger.error("{0}:updateEntityParent() body:{1}".format(str(datetime.datetime.now()), str(err)))
def getEntityDict():
try:
connection = Engine.connect()
entity_dict = {} #key = entity_name, value = entity_cik
tables = getAllTables()
for table in tables:
if table.name == "entities":
try:
select_stmt = table.select()
table_select = connection.execute(select_stmt).fetchall()
for entry in table_select:
try:
entity_dict[entry[2]] = entry[0]
except Exception as err:
database_utility_logger.error("{0}:getEntityDict() inner:{1}".format(str(datetime.datetime.now()), str(err)))
except Exception as err:
database_utility_logger.error("{0}:getEntityDict() middle:{1}".format(str(datetime.datetime.now()), str(err)))
connection.close()
return entity_dict
except Exception as err:
database_utility_logger.error("{0}:getEntityDict() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def getFilingTreeInfo(target_cik):
try:
target_cik = int(target_cik)
connection = Engine.connect()
filings = []
tables = getAllTables()
for table in tables:
if table.name.startswith("filings"):
try:
select_stmt = table.select().where(table.columns.entity_cik == target_cik)
table_select = connection.execute(select_stmt).fetchall()
if len(table_select) > 0:
if table_select[0][1] is not None:
filings.append(table.name[-4:] + "-Q1")
if table_select[0][2] is not None:
filings.append(table.name[-4:] + "-Q2")
if table_select[0][3] is not None:
filings.append(table.name[-4:] + "-Q3")
if table_select[0][4] is not None:
filings.append(table.name[-4:] + "-Q4")
except Exception as err:
database_utility_logger.error("{0}:getFilingTreeInfo() inner:{1}".format(str(datetime.datetime.now()), str(err)))
connection.close()
return filings
except Exception as err:
database_utility_logger.error("{0}:getFilingTreeInfo() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def selectFromDatabase(target_cik, target_period):
try:
connection = Engine.connect()
target_cik = int(target_cik)
tables = getAllTables()
select_result = None
for table in tables:
if table.name == "filings{0}".format(target_period[2:6]):
if target_period[0:2] == "q1":
try:
select_stmt = table.select().where(table.columns.entity_cik == target_cik)
select_result = connection.execute(select_stmt).first() #SA RowProxy
if select_result is not None:
try:
if select_result.items()[1][1] is not None:
select_result = pickle.loads(select_result.items()[1][1]) #1st: col #; 2nd: 0 = key, 1 = val
else:
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q1 inner:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q1 outer:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
elif target_period[0:2] == "q2":
try:
select_stmt = table.select().where(table.columns.entity_cik == target_cik)
select_result = connection.execute(select_stmt).first()
if select_result is not None:
try:
if select_result.items()[2][1] is not None:
select_result = pickle.loads(select_result.items()[2][1])
else:
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q2 inner:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q2 outer:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
elif target_period[0:2] == "q3":
try:
select_stmt = table.select().where(table.columns.entity_cik == target_cik)
select_result = connection.execute(select_stmt).first()
if select_result is not None:
try:
if select_result.items()[3][1] is not None:
select_result = pickle.loads(select_result.items()[3][1])
else:
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q3 inner:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q3 outer:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
elif target_period[0:2] == "q4":
try:
select_stmt = table.select().where(table.columns.entity_cik == target_cik)
select_result = connection.execute(select_stmt).first()
if select_result is not None:
try:
if select_result.items()[4][1] is not None:
select_result = pickle.loads(select_result.items()[4][1])
else:
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q4 inner:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() q4 outer:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
else:
select_result = None
connection.close()
return select_result
except Exception as err:
database_utility_logger.error("{0}:selectFromDatabase() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def existsInDatabase(target_fact_uri, target_cik = None):
try:
return_vals = []
filing = BookFilingUtility.parseFactFile(target_fact_uri)
entity_cik_list, entity_parent_cik, entity_name, filing_period = BookFilingUtility.getFilingInfo(filing)
cell_list = []
if target_cik is not None:
cell = selectFromDatabase(target_cik, filing_period)
if cell is not None:
return_vals.append((target_cik, filing_period, cell))
else:
if len(entity_cik_list) >= 1:
for entity_cik in entity_cik_list:
cell = selectFromDatabase(entity_cik, filing_period)
cell_list.append((entity_cik, filing_period, cell))
for item in cell_list:
if item[2] is not None:
return_vals.append(item)
return return_vals
except Exception as err:
database_utility_logger.error("{0}:existsInDatabase():{1}".format(str(datetime.datetime.now()), str(err)))
def manualExistsInDatabase(manual_cik, manual_period):
try:
cell = selectFromDatabase(manual_cik, manual_period)
if cell is None:
return False
else:
return True
except Exception as err:
database_utility_logger.error("{0}:manualExistsInDatabase():{1}".format(str(datetime.datetime.now()), str(err)))
def addToEntitiesTable(target_entity_cik, target_parent_cik, target_entity_name):
try:
connection = Engine.connect()
tables = getAllTables()
present = False
for table in tables:
if table.name == "entities":
try:
select_stmt = table.select().where(table.columns.entity_cik == target_entity_cik)
select_result = connection.execute(select_stmt).first()
if select_result is not None:
present = True
else:
insert_stmt = table.insert().values(entity_cik = target_entity_cik,
parent_cik = target_parent_cik,
entity_name = target_entity_name)
insert_result = connection.execute(insert_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToEntitiesTable() inner:{1}".format(str(datetime.datetime.now()), str(err)))
connection.close()
return present
except Exception as err:
database_utility_logger.error("{0}:addToEntitiesTable() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def addToFilingsTable(target_table_name, target_entity_cik, target_quarter, target_filing):
try:
target_filing = pickle.dumps(target_filing)
connection = Engine.connect()
tables = getAllTables()
present = False
select_result = None
for table in tables:
if table.name == target_table_name:
try:
select_stmt = table.select().where(table.columns.entity_cik == target_entity_cik)
select_result = connection.execute(select_stmt).first()
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select:{1}".format(str(datetime.datetime.now()), str(err)))
if select_result is not None:
if target_quarter == "q1":
try:
update_stmt = table.update().where(table.columns.entity_cik == target_entity_cik).values(q1 = target_filing)
update_result = connection.execute(update_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result == None q1:{1}".format(str(datetime.datetime.now()), str(err)))
elif target_quarter == "q2":
try:
update_stmt = table.update().where(table.columns.entity_cik == target_entity_cik).values(q2 = target_filing)
update_result = connection.execute(update_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result == None q2:{1}".format(str(datetime.datetime.now()), str(err)))
elif target_quarter == "q3":
try:
update_stmt = table.update().where(table.columns.entity_cik == target_entity_cik).values(q3 = target_filing)
update_result = connection.execute(update_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result == None q3:{1}".format(str(datetime.datetime.now()), str(err)))
elif target_quarter == "q4":
try:
update_stmt = table.update().where(table.columns.entity_cik == target_entity_cik).values(q4 = target_filing)
update_result = connection.execute(update_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result == None q4:{1}".format(str(datetime.datetime.now()), str(err)))
else:
if target_quarter == "q1":
try:
insert_stmt = table.insert().values(entity_cik = target_entity_cik, q1 = target_filing)
insert_result = connection.execute(insert_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result != None q1:{1}".format(str(datetime.datetime.now()), str(err)))
elif target_quarter == "q2":
try:
insert_stmt = table.insert().values(entity_cik = target_entity_cik, q2 = target_filing)
insert_result = connection.execute(insert_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result != None q2:{1}".format(str(datetime.datetime.now()), str(err)))
elif target_quarter == "q3":
try:
insert_stmt = table.insert().values(entity_cik = target_entity_cik, q3 = target_filing)
insert_result = connection.execute(insert_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result != None q3:{1}".format(str(datetime.datetime.now()), str(err)))
elif target_quarter == "q4":
try:
insert_stmt = table.insert().values(entity_cik = target_entity_cik, q4 = target_filing)
insert_result = connection.execute(insert_stmt)
present = True
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() select_result != None q4:{1}".format(str(datetime.datetime.now()), str(err)))
connection.close()
return present
except Exception as err:
database_utility_logger.error("{0}:addToFilingsTable() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def addToDatabase(target_fact_uri, target_cik = None):
try:
filing = BookFilingUtility.parseFactFile(target_fact_uri)
target_cik_list, target_parent_cik, target_name, filing_period = BookFilingUtility.getFilingInfo(filing)
if target_cik is not None:
target_cik = int(target_cik)
else:
if len(target_cik_list) >= 1:
target_cik = int(target_cik_list[0])
filing_year = filing_period[2:6]
filing_quarter = filing_period[0:2]
filing_table_name = "filings" + filing_year
if target_cik is None:
return
tables = getAllTables()
filings_table_found = False
for table in tables:
if table.name == filing_table_name:
filings_table_found = True
if not filings_table_found:
makeFilingsTable(filing_table_name)
addToEntitiesTable(target_cik, target_parent_cik, target_name)
addToFilingsTable(filing_table_name, target_cik, filing_quarter, filing)
updateEntityName(target_cik)
except Exception as err:
database_utility_logger.error("{0}:addToDatabase():{1}".format(str(datetime.datetime.now()), str(err)))
return
def manualAddToDatabase(manual_cik, manual_name, manual_period, target_fact_uri):
try:
filing = BookFilingUtility.parseFactFile(target_fact_uri)
target_cik = int(manual_cik)
target_parent_cik = None
target_name = str(manual_name)
manual_period = str(manual_period)
filing_year = manual_period[2:6]
filing_quarter = manual_period[0:2]
filing_table_name = "filings" + filing_year
tables = getAllTables()
filings_table_found = False
for table in tables:
if table.name == filing_table_name:
filings_table_found = True
if not filings_table_found:
makeFilingsTable(filing_table_name)
addToEntitiesTable(target_cik, target_parent_cik, target_name)
addToFilingsTable(filing_table_name, target_cik, filing_quarter, filing)
updateEntityName(target_cik)
except Exception as err:
database_utility_logger.error("{0}:manualAddToDatabase():{1}".format(str(datetime.datetime.now()), str(err)))
return
def countEntityAndChildren(target_cik, count = 0):
try:
connection = Engine.connect()
target_cik = int(target_cik)
tables = getAllTables()
children_sel_result = None
if len(tables) > 0:
for table in tables:
if table.exists() is True:
if table.name == "entities":
try:
entity_sel_stmt = table.select().where(table.columns.entity_cik == target_cik)
entity_sel_result = connection.execute(entity_sel_stmt).fetchall()
if entity_sel_result is not None:
count += len(entity_sel_result)
children_sel_stmt = table.select().where(table.columns.parent_cik == target_cik)
children_sel_result = connection.execute(children_sel_stmt).fetchall()
except Exception as err:
database_utility_logger.error("{0}:countEntityAndChildren() inner:{1}".format(str(datetime.datetime.now()), str(err)))
if children_sel_result is not None:
for result in children_sel_result:
count += countEntityAndChildren(result.entity_cik)
connection.close()
return count
except Exception as err:
database_utility_logger.error("{0}:countEntityAndChildren() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def removeEntityFromDatabase(book_main_window, target_cik, call = 0, total_items = 0):
try:
call += 1
progress = 0
if call == 1:
total_items = countEntityAndChildren(target_cik)
if total_items != 0:
progress = int(100 * call / total_items)
else:
if total_items != 0:
progress = int(100 * call / total_items)
book_main_window.updateProgressBar(progress)
children_sel_result = None
connection = Engine.connect()
target_cik = int(target_cik)
tables = getAllTables()
if len(tables) > 0:
for table in tables:
if table.exists() is True:
if table.name == "entities":
try:
parent_del_stmt = table.delete().where(table.columns.entity_cik == target_cik)
parent_del_result = connection.execute(parent_del_stmt)
children_sel_stmt = table.select().where(table.columns.parent_cik == target_cik)
children_sel_result = connection.execute(children_sel_stmt).fetchall()
except Exception as err:
pass
else:
try:
generic_del_stmt = table.delete().where(table.columns.entity_cik == target_cik)
generic_del_result = connection.execute(generic_del_stmt)
except Exception as err:
pass
if children_sel_result is not None:
for result in children_sel_result:
call = removeEntityFromDatabase(book_main_window, result.entity_cik,
call = call, total_items = total_items)
if len(tables) > 0:
for table in tables:
if table.exists() is True:
try:
generic_sel_stmt = table.select()
generic_sel_result = connection.execute(generic_sel_stmt).first()
if generic_sel_result is None and table.name != "entities":
table.drop(bind = Engine)
except Exception as err:
pass
if call == total_items:
connection.execute("VACUUM")
book_main_window.resetProgressBar()
connection.close()
return call
except Exception as err:
database_utility_logger.error("{0}:removeEntityFromDatabase():{1}".format(str(datetime.datetime.now()), str(err)))
def removeFilingFromDatabase(book_main_window, target_cik, target_period, call = 0, total_items = 0):
try:
call += 1
total_items = 3
progress = int(100 * call / total_items)
book_main_window.updateProgressBar(progress)
connection = Engine.connect()
target_cik = int(target_cik)
target_period = str(target_period)
if len(target_period) == 6:
target_quarter = target_period[0:2]
target_year = target_period[2:6]
target_table_name = "filings" + target_year
elif len(target_period) == 4:
target_year = target_period
target_table_name = "filings" + target_year
tables = getAllTables()
if len(tables) > 0:
for table in tables:
if table.exists() is True:
if table.name == target_table_name:
try:
if len(target_period) == 6:
if target_quarter == "q1":
del_stmt = table.update().where(table.columns.entity_cik == target_cik).values(q1 = None)
elif target_quarter == "q2":
del_stmt = table.update().where(table.columns.entity_cik == target_cik).values(q2 = None)
elif target_quarter == "q3":
del_stmt = table.update().where(table.columns.entity_cik == target_cik).values(q3 = None)
elif target_quarter == "q4":
del_stmt = table.update().where(table.columns.entity_cik == target_cik).values(q4 = None)
elif len(target_period) == 4:
del_stmt = table.delete().where(table.columns.entity_cik == target_cik)
connection.execute(del_stmt)
except Exception as err:
database_utility_logger.error("{0}:removeFilingFromDatabase() delete:{1}".format(str(datetime.datetime.now()), str(err)))
for table in tables:
if table.exists() is True:
try:
generic_sel_stmt = table.select()
generic_sel_result = connection.execute(generic_sel_stmt).first()
if generic_sel_result is None and table.name != "entities":
table.drop(bind = Engine)
except Exception as err:
database_utility_logger.error("{0}:removeFilingFromDatabase() table_drop:{1}".format(str(datetime.datetime.now()), str(err)))
call += 1
progress = int(100 * call / total_items)
book_main_window.updateProgressBar(progress)
connection.execute("VACUUM")
call += 1
progress = int(100 * call / total_items)
book_main_window.updateProgressBar(progress)
book_main_window.resetProgressBar()
connection.close()
return True
except Exception as err:
database_utility_logger.error("{0}:removeFilingFromDatabase() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def updateEntityName(target_cik):
try:
connection = Engine.connect()
last_filing = getLastFiling(target_cik)
target_entity_cik_list, entity_parent_cik, new_entity_name, filing_period = BookFilingUtility.getFilingInfo(last_filing)
tables = getAllTables()
for table in tables:
if table.name == "entities":
try:
update_stmt = table.update().where(table.columns.entity_cik == target_cik).values(entity_name = new_entity_name)
update_result = connection.execute(update_stmt)
except Exception as err:
database_utility_logger.error("{0}:updateEntityName() inner:{1}".format(str(datetime.datetime.now()), str(err)))
connection.close()
except Exception as err:
database_utility_logger.error("{0}:updateEntityName() outer:{1}".format(str(datetime.datetime.now()), str(err)))
return
def getLastFiling(target_cik):
try:
connection = Engine.connect()
tables = getAllTables()
select_result = None
target_cik = int(target_cik)
for table in reversed(tables):
if table.name.startswith("filings"):
try:
select_stmt = table.select().where(table.columns.entity_cik == target_cik)
select_result = connection.execute(select_stmt).first() #SA RowProxy
if select_result is not None: # entity is in table
try:
for col in reversed(select_result.items()):
if col[1] is not None: # latest filing
select_result = pickle.loads(col[1]) # [0 = key, 1 = val]
return select_result
else:
pass
except Exception as err:
database_utility_logger.error("{0}:getLastFiling() inner:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
except Exception as err:
database_utility_logger.error("{0}:getLastFiling() middle:{1}".format(str(datetime.datetime.now()), str(err)))
select_result = None
connection.close()
return select_result
except Exception as err:
database_utility_logger.error("{0}:getLastFiling() outer:{1}".format(str(datetime.datetime.now()), str(err)))
def renameEntityInDatabase(target_cik, new_entity_name):
try:
target_cik = int(target_cik)
new_entity_name = str(new_entity_name)
connection = Engine.connect()
tables = getAllTables()
for table in tables:
if table.name == "entities":
try:
update_stmt = table.update().where(table.columns.entity_cik == target_cik).values(entity_name = new_entity_name)
update_result = connection.execute(update_stmt)
except Exception as err:
database_utility_logger.error("{0}:renameEntityInDatabase() inner:{1}".format(str(datetime.datetime.now()), str(err)))
connection.close()
except Exception as err:
database_utility_logger.error("{0}:renameEntityInDatabase() outer:{1}".format(str(datetime.datetime.now()), str(err)))
return
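# Usage sketch (illustrative addition, not part of the original module). The
# helpers above rely on a module-level SQLAlchemy Engine, a getAllTables()
# helper and BookFilingUtility, none of which are shown here, so this is only
# a hedged outline of the intended flow; the fact-file path and CIK are
# hypothetical. addToDatabase() provisions the entities/filingsYYYY tables and
# stores the pickled filing; selectFromDatabase() reads it back by CIK and a
# "q<quarter><year>" period string.
if __name__ == "__main__":
    sample_cik = 320193                      # hypothetical CIK
    sample_uri = "/path/to/fact_file"        # hypothetical fact file
    addToDatabase(sample_uri, target_cik=sample_cik)
    filing = selectFromDatabase(sample_cik, "q12019")
    print(manualExistsInDatabase(sample_cik, "q12019"))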
| 50.019178
| 160
| 0.572082
| 3,812
| 36,514
| 5.289874
| 0.066632
| 0.029011
| 0.051426
| 0.060501
| 0.741433
| 0.71267
| 0.667245
| 0.650136
| 0.629854
| 0.568907
| 0
| 0.010988
| 0.334502
| 36,514
| 729
| 161
| 50.087792
| 0.818848
| 0.078162
| 0
| 0.655608
| 0
| 0
| 0.062838
| 0.033975
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036335
| false
| 0.014218
| 0.009479
| 0
| 0.086888
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d0724f179ed3e0352058e826e5ed580723fd7c4d
| 1,250
|
py
|
Python
|
insights/parsers/tests/test_ansible_tower_settings.py
|
maxamillion/insights-core
|
aa11e07e68077df97b6c85219911f8382be6e2fa
|
[
"Apache-2.0"
] | null | null | null |
insights/parsers/tests/test_ansible_tower_settings.py
|
maxamillion/insights-core
|
aa11e07e68077df97b6c85219911f8382be6e2fa
|
[
"Apache-2.0"
] | null | null | null |
insights/parsers/tests/test_ansible_tower_settings.py
|
maxamillion/insights-core
|
aa11e07e68077df97b6c85219911f8382be6e2fa
|
[
"Apache-2.0"
] | null | null | null |
import doctest
import pytest
from insights.parsers import ansible_tower_settings, SkipException
from insights.tests import context_wrap
ANSIBLE_TOWER_CONFIG_CUSTOM = '''
AWX_CLEANUP_PATHS = False
LOGGING['handlers']['tower_warnings']['level'] = 'DEBUG'
'''.strip()
ANSIBLE_TOWER_CONFIG_CUSTOM_INVALID1 = '''
'''.strip()
ANSIBLE_TOWER_CONFIG_CUSTOM_INVALID2 = '''
AWX_CLEANUP_PATHS
'''.strip()
def test_ansible_tower_settings():
conf = ansible_tower_settings.AnsibleTowerSettings(context_wrap(ANSIBLE_TOWER_CONFIG_CUSTOM))
assert conf['AWX_CLEANUP_PATHS'] == 'False'
with pytest.raises(SkipException) as exc:
ansible_tower_settings.AnsibleTowerSettings(context_wrap(ANSIBLE_TOWER_CONFIG_CUSTOM_INVALID1))
assert 'No Valid Configuration' in str(exc)
with pytest.raises(SkipException) as exc:
ansible_tower_settings.AnsibleTowerSettings(context_wrap(ANSIBLE_TOWER_CONFIG_CUSTOM_INVALID2))
assert 'No Valid Configuration' in str(exc)
def test_ansible_tower_settings_documentation():
failed_count, tests = doctest.testmod(
ansible_tower_settings,
globs={'conf': ansible_tower_settings.AnsibleTowerSettings(context_wrap(ANSIBLE_TOWER_CONFIG_CUSTOM))}
)
assert failed_count == 0
| 32.051282
| 110
| 0.7848
| 148
| 1,250
| 6.236486
| 0.317568
| 0.195016
| 0.173348
| 0.182015
| 0.687974
| 0.531961
| 0.494041
| 0.420368
| 0.420368
| 0.420368
| 0
| 0.004566
| 0.124
| 1,250
| 38
| 111
| 32.894737
| 0.838356
| 0
| 0
| 0.25
| 0
| 0
| 0.1392
| 0.0368
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d08144c8fccf523fe00afd797e29a4fc88443666
| 851
|
py
|
Python
|
configuration.py
|
stoberblog/sunspec-modbus
|
2ce7cc6e92cb480cce4e488c8ffd716ec053ec01
|
[
"MIT"
] | 22
|
2018-03-01T16:13:48.000Z
|
2022-02-27T07:59:24.000Z
|
configuration.py
|
msgis/sunspec-modbus
|
2ce7cc6e92cb480cce4e488c8ffd716ec053ec01
|
[
"MIT"
] | null | null | null |
configuration.py
|
msgis/sunspec-modbus
|
2ce7cc6e92cb480cce4e488c8ffd716ec053ec01
|
[
"MIT"
] | 7
|
2019-03-02T17:10:29.000Z
|
2021-06-19T00:26:05.000Z
|
# -*- coding: utf-8 -*-
"""
@author: stoberblog
@detail: This is a configuration file for the Solar Modbus project.
"""
# MODBUS DETAILS
INVERTER_IP = "192.168.1.29"
MODBUS_PORT = 7502
METER_ADDR = 240
MODBUS_TIMEOUT = 30 #seconds to wait before failure
# METER INSTALLED
METER_INSTALLED = True
# DATABASE
DATABASE_TYPE = "mariadb" # Current options: mariadb
DATABASE_ADDR = "127.0.0.1"
DATABASE_USER = "sUser"
DATABASE_PASSWD = "sPasswd"
DATABASE_DB = "solarMB"
#SCHEDULER
SCHED_INTERVAL = 1 # Minutes between recollecting new data
# DATA
EPOCH_INVERTER = False # False = Use computer time, True = get time off inverter (scheduler will still use computer time)
POW_THERESHOLD = 10 # Watt threshold
LOG_LEVEL = "ERROR" # Levels: NONE, FATAL, ERROR, NOTICE, DEBUG
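# Illustrative consumer (added for illustration, not part of the original
# file): the constants above are plain module attributes, so a collector
# script would simply import this module and combine them. The URI shape and
# the minutes-to-seconds conversion below are assumptions, since no consuming
# code is included in this file.
if __name__ == "__main__":
    db_uri = "mysql://{user}:{pw}@{host}/{db}".format(
        user=DATABASE_USER, pw=DATABASE_PASSWD, host=DATABASE_ADDR, db=DATABASE_DB)
    poll_seconds = SCHED_INTERVAL * 60  # SCHED_INTERVAL is given in minutes
    print("polling %s every %d seconds" % (INVERTER_IP, poll_seconds))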
| 26.59375
| 129
| 0.673325
| 107
| 851
| 5.224299
| 0.757009
| 0.050089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043143
| 0.237368
| 851
| 31
| 130
| 27.451613
| 0.818182
| 0.504113
| 0
| 0
| 0
| 0
| 0.140541
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d084cbae54b480c7285413e8320b17434455ebf4
| 1,666
|
py
|
Python
|
python_exercises/main.py
|
nchristie/slide-python-intro
|
dd52781b5d25435f97aa83cfff58c175fa7fdd1c
|
[
"MIT"
] | 1
|
2018-06-07T12:40:37.000Z
|
2018-06-07T12:40:37.000Z
|
python_exercises/main.py
|
nchristie/slide-python-intro
|
dd52781b5d25435f97aa83cfff58c175fa7fdd1c
|
[
"MIT"
] | 3
|
2018-06-07T14:39:19.000Z
|
2019-01-15T16:35:23.000Z
|
python_exercises/main.py
|
nchristie/slide-python-intro
|
dd52781b5d25435f97aa83cfff58c175fa7fdd1c
|
[
"MIT"
] | 9
|
2018-05-30T17:12:27.000Z
|
2021-07-01T03:22:48.000Z
|
"""
Press run above to start
"""
from exercises.question_runner import run
from question_directory import (
boolean_operators,
boolean_review,
changing_lists,
dictionaries,
equality_and_booleans,
for_loops,
functions,
functions_quick_review,
greater_than_less_than_and_booleans,
inbuilt_functions_and_operators,
indexing_lists,
variables_equality_and_booleans,
while_loops,
)
from unit_tests.test_instructor_code import * # noqa
if input("\n\nPress enter to start\n") != "test":
# LESSON ONE
# https://kathrinschuler.github.io/slide-python-intro/#/10/3
run(equality_and_booleans.TASKS, equality_and_booleans.BLURB)
run(greater_than_less_than_and_booleans.TASKS, greater_than_less_than_and_booleans.BLURB)
# https://kathrinschuler.github.io/slide-python-intro/#/11/4
run(variables_equality_and_booleans.TASKS, variables_equality_and_booleans.BLURB)
run(boolean_operators.TASKS, boolean_operators.BLURB)
# LESSON TWO
run(inbuilt_functions_and_operators.TASKS, inbuilt_functions_and_operators.BLURB)
# LESSON THREE
# https://kathrinschuler.github.io/slide-python-intro/#/25/4
run(boolean_review.TASKS, boolean_review.BLURB)
run(while_loops.TASKS, while_loops.BLURB)
run(for_loops.TASKS, for_loops.BLURB)
run(functions.TASKS, functions.BLURB)
# LESSON FOUR
run(indexing_lists.TASKS, indexing_lists.BLURB)
run(functions_quick_review.TASKS, functions_quick_review.BLURB)
run(changing_lists.TASKS, changing_lists.BLURB)
run(dictionaries.TASKS, dictionaries.BLURB)
else:
if __name__ == "__main__":
unittest.main() # noqa
| 32.038462
| 93
| 0.758703
| 215
| 1,666
| 5.525581
| 0.302326
| 0.083333
| 0.09596
| 0.04798
| 0.229798
| 0.184343
| 0.108586
| 0
| 0
| 0
| 0
| 0.006342
| 0.148259
| 1,666
| 51
| 94
| 32.666667
| 0.830867
| 0.153661
| 0
| 0
| 0
| 0
| 0.027318
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.088235
| 0
| 0.088235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d0879d6a8986f5d89d403d25cf640af496e2854b
| 2,390
|
py
|
Python
|
tests/system/test_grpc_streams.py
|
danoscarmike/gapic-generator-python
|
805645d5571dde05c6fb947c81f0f41f2ba10a98
|
[
"Apache-2.0"
] | null | null | null |
tests/system/test_grpc_streams.py
|
danoscarmike/gapic-generator-python
|
805645d5571dde05c6fb947c81f0f41f2ba10a98
|
[
"Apache-2.0"
] | null | null | null |
tests/system/test_grpc_streams.py
|
danoscarmike/gapic-generator-python
|
805645d5571dde05c6fb947c81f0f41f2ba10a98
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google import showcase
metadata = (("showcase-trailer", "hello world"),)
def test_unary_stream(echo):
content = 'The hail in Wales falls mainly on the snails.'
responses = echo.expand({
'content': content,
}, metadata=metadata)
# Consume the response and ensure it matches what we expect.
# with pytest.raises(exceptions.NotFound) as exc:
for ground_truth, response in zip(content.split(' '), responses):
assert response.content == ground_truth
assert ground_truth == 'snails.'
assert responses.trailing_metadata() == metadata
def test_stream_unary(echo):
requests = []
requests.append(showcase.EchoRequest(content="hello"))
requests.append(showcase.EchoRequest(content="world!"))
response = echo.collect(iter(requests))
assert response.content == 'hello world!'
def test_stream_unary_passing_dict(echo):
requests = [{'content': 'hello'}, {'content': 'world!'}]
response = echo.collect(iter(requests))
assert response.content == 'hello world!'
def test_stream_stream(echo):
requests = []
requests.append(showcase.EchoRequest(content="hello"))
requests.append(showcase.EchoRequest(content="world!"))
responses = echo.chat(iter(requests), metadata=metadata)
contents = []
for response in responses:
contents.append(response.content)
assert contents == ['hello', 'world!']
assert responses.trailing_metadata() == metadata
def test_stream_stream_passing_dict(echo):
requests = [{'content': 'hello'}, {'content': 'world!'}]
responses = echo.chat(iter(requests), metadata=metadata)
contents = []
for response in responses:
contents.append(response.content)
assert contents == ['hello', 'world!']
assert responses.trailing_metadata() == metadata
| 32.297297
| 74
| 0.706695
| 291
| 2,390
| 5.735395
| 0.381443
| 0.03595
| 0.031156
| 0.079089
| 0.50749
| 0.50749
| 0.50749
| 0.50749
| 0.426603
| 0.426603
| 0
| 0.004065
| 0.176569
| 2,390
| 73
| 75
| 32.739726
| 0.844004
| 0.274059
| 0
| 0.641026
| 0
| 0
| 0.119255
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 1
| 0.128205
| false
| 0.051282
| 0.025641
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d0a007717c7cd5164028357f50c0a77b0adfbf09
| 243
|
py
|
Python
|
M7 - python script.py
|
kfmahre/movies_neural_net
|
749d2a4e05bb24537c03c6069443da6956084055
|
[
"MIT"
] | null | null | null |
M7 - python script.py
|
kfmahre/movies_neural_net
|
749d2a4e05bb24537c03c6069443da6956084055
|
[
"MIT"
] | null | null | null |
M7 - python script.py
|
kfmahre/movies_neural_net
|
749d2a4e05bb24537c03c6069443da6956084055
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 15 01:37:59 2019
@author: kfmah
"""
stuff = list()
stuff.append('python')
stuff.append('chuck')
stuff.sort()
print (stuff[0])
print (stuff.__getitem__(0))
print (list.__getitem__(stuff,0))
| 12.15
| 35
| 0.650206
| 36
| 243
| 4.166667
| 0.638889
| 0.146667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.144033
| 243
| 20
| 36
| 12.15
| 0.644231
| 0.304527
| 0
| 0
| 0
| 0
| 0.067901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.428571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
d0a31acaeb6c9427098272c4fe196ed9fa1d7cff
| 17,639
|
py
|
Python
|
Telstra_Messaging/configuration.py
|
yashints/MessagingAPI-SDK-python
|
6cb41ed90fd237e57a6ce4ca383fa035cd842a7d
|
[
"Apache-2.0"
] | null | null | null |
Telstra_Messaging/configuration.py
|
yashints/MessagingAPI-SDK-python
|
6cb41ed90fd237e57a6ce4ca383fa035cd842a7d
|
[
"Apache-2.0"
] | null | null | null |
Telstra_Messaging/configuration.py
|
yashints/MessagingAPI-SDK-python
|
6cb41ed90fd237e57a6ce4ca383fa035cd842a7d
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Telstra Messaging API
# Introduction <table><tbody><tr><td class = 'into_api' style='border:none;padding:0 0 0 0'><p>Send and receive SMS and MMS messages globally using Telstra's enterprise grade Messaging API. It also allows your application to track the delivery status of both sent and received messages. Get your dedicated Australian number, and start sending and receiving messages today.</p></td><td class = 'into_api_logo' style='width: 20%;border:none'><img class = 'api_logo' style='margin: -26px 0 0 0' src = 'https://test-telstra-retail-tdev.devportal.apigee.io/sites/default/files/messagingapi-icon.png'></td></tr></tbody></table> # Features The Telstra Messaging API provides the features below. | Feature | Description | | --- | --- | | `Dedicated Number` | Provision a mobile number for your account to be used as `from` address in the API | | `Send Messages` | Sending SMS or MMS messages | | `Receive Messages` | Telstra will deliver messages sent to a dedicated number or to the `notifyURL` defined by you | | `Broadcast Messages` | Invoke a single API call to send a message to a list of numbers provided in `to` | | `Delivery Status` | Query the delivery status of your messages | | `Callbacks` | Provide a notification URL and Telstra will notify your app when a message status changes | | `Alphanumeric Identifier` | Differentiate yourself by providing an alphanumeric string in `from`. This feature is only available on paid plans | | `Concatenation` | Send messages up to 1900 characters long and Telstra will automaticaly segment and reassemble them | | `Reply Request` | Create a chat session by associating `messageId` and `to` number to track responses received from a mobile number. We will store this association for 8 days | | `Character set` | Accepts all Unicode characters as part of UTF-8 | | `Bounce-back response` | See if your SMS hits an unreachable or unallocated number (Australia Only) | | `Queuing` | Messaging API will automatically queue and deliver each message at a compliant rate. | | `Emoji Encoding` | The API supports the encoding of the full range of emojis. Emojis in the reply messages will be in their UTF-8 format. | ## Delivery Notification or Callbacks The API provides several methods for notifying when a message has been delivered to the destination. 1. When you send a message there is an opportunity to specify a `notifyURL`. Once the message has been delivered the API will make a call to this URL to advise of the message status. 2. If you do not specify a URL you can always call the `GET /status` API to get the status of the message. # Getting Access to the API 1. Register at [https://dev.telstra.com](https://dev.telstra.com). 2. After registration, login to [https://dev.telstra.com](https://dev.telstra.com) and navigate to the **My apps** page. 3. Create your application by clicking the **Add new app** button 4. Select **API Free Trial** Product when configuring your application. This Product includes the Telstra Messaging API as well as other free trial APIs. Your application will be approved automatically. 5. There is a maximum of 1000 free messages per developer. Additional messages and features can be purchased from [https://dev.telstra.com](https://dev.telstra.com). 6. Note your `Client key` and `Client secret` as these will be needed to provision a number for your application and for authentication. Now head over to **Getting Started** where you can find a postman collection as well as some links to sample apps and SDKs to get you started. Happy Messaging! 
# Frequently Asked Questions **Q: Is creating a subscription via the Provisioning call a required step?** A. Yes. You will only be able to start sending messages if you have a provisioned dedicated number. Use Provisioning to create a dedicated number subscription, or renew your dedicated number if it has expired. **Q: When trying to send an SMS I receive a `400 Bad Request` response. How can I fix this?** A. You need to make sure you have a provisioned dedicated number before you can send an SMS. If you do not have a provisioned dedicated number and you try to send a message via the API, you will get the error below in the response: <pre><code class=\"language-sh\">{ \"status\":\"400\", \"code\":\"DELIVERY-IMPOSSIBLE\", \"message\":\"Invalid \\'from\\' address specified\" }</code></pre> Use Provisioning to create a dedicated number subscription, or renew your dedicated number if it has expired. **Q: How long does my dedicated number stay active for?** A. When you provision a dedicated number, by default it will be active for 30 days. You can use the `activeDays` parameter during the provisioning call to increment or decrement the number of days your dedicated number will remain active. Note that Free Trial apps will have 30 days as the maximum `activeDays` they can add to their provisioned number. If the Provisioning call is made several times within that 30-Day period, it will return the `expiryDate` in the Unix format and will not add any activeDays until after that `expiryDate`. **Q: Can I send a broadcast message using the Telstra Messaging API?** A. Yes. Recipient numbers can be in the form of an array of strings if a broadcast message needs to be sent, allowing you to send to multiple mobile numbers in one API call. A sample request body for this will be: `{\"to\":[\"+61412345678\",\"+61487654321\"],\"body\":\"Test Message\"}` **Q: Can I send SMS and MMS to all countries?** A. You can send SMS and MMS to all countries EXCEPT to countries which are subject to global sanctions namely: Burma, Côte d'Ivoire, Cuba, Iran, North Korea, Syria. **Q: Can I use `Alphanumeric Identifier` from my paid plan via credit card?** A. `Alphanumeric Identifier` is only available on Telstra Account paid plans, not through credit card paid plans. **Q: What is the maximum sized MMS that I can send?** A. This will depend on the carrier that will receive the MMS. For Telstra it's up to 2MB, Optus up to 1.5MB and Vodafone only allows up to 500kB. You will need to check with international carriers for thier MMS size limits. **Q: How is the size of an MMS calculated?** A. Images are scaled up to approximately 4/3 when base64 encoded. Additionally, there is approximately 200 bytes of overhead on each MMS. Assuming the maximum MMS that can be sent on Telstra’s network is 2MB, then the maximum image size that can be sent will be approximately 1.378MB (1.378 x 1.34 + 200, without SOAP encapsulation). **Q: How is an MMS classified as Small or Large?** A. MMSes with size below 600kB are classed as Small whereas those that are bigger than 600kB are classed as Large. They will be charged accordingly. **Q: Are SMILs supported by the Messaging API?** A. While there will be no error if you send an MMS with a SMIL presentation, the actual layout or sequence defined in the SMIL may not display as expected because most of the new smartphone devices ignore the SMIL presentation layer. SMIL was used in feature phones which had limited capability and SMIL allowed a *powerpoint type* presentation to be provided. 
Smartphones now have the capability to display video which is the better option for presentations. It is recommended that MMS messages should just drop the SMIL. **Q: How do I assign a delivery notification or callback URL?** A. You can assign a delivery notification or callback URL by adding the `notifyURL` parameter in the body of the request when you send a message. Once the message has been delivered, a notification will then be posted to this callback URL. **Q: What is the difference between the `notifyURL` parameter in the Provisoning call versus the `notifyURL` parameter in the Send Message call?** A. The `notifyURL` in the Provisoning call will be the URL where replies to the provisioned number will be posted. On the other hand, the `notifyURL` in the Send Message call will be the URL where the delivery notification will be posted, e.g. when an SMS has already been delivered to the recipient. # Getting Started Below are the steps to get started with the Telstra Messaging API. 1. Generate an OAuth2 token using your `Client key` and `Client secret`. 2. Use the Provisioning call to create a subscription and receive a dedicated number. 3. Send a message to a specific mobile number. ## Run in Postman <a href=\"https://app.getpostman.com/run-collection/ded00578f69a9deba256#?env%5BMessaging%20API%20Environments%5D=W3siZW5hYmxlZCI6dHJ1ZSwia2V5IjoiY2xpZW50X2lkIiwidmFsdWUiOiIiLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoiY2xpZW50X3NlY3JldCIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6ImFjY2Vzc190b2tlbiIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6Imhvc3QiLCJ2YWx1ZSI6InRhcGkudGVsc3RyYS5jb20iLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoiQXV0aG9yaXphdGlvbiIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6Im9hdXRoX2hvc3QiLCJ2YWx1ZSI6InNhcGkudGVsc3RyYS5jb20iLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoibWVzc2FnZV9pZCIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifV0=\"><img src=\"https://run.pstmn.io/button.svg\" alt=\"Run in Postman\"/></a> ## Sample Apps - [Perl Sample App](https://github.com/telstra/MessagingAPI-perl-sample-app) - [Happy Chat App](https://github.com/telstra/messaging-sample-code-happy-chat) - [PHP Sample App](https://github.com/developersteve/telstra-messaging-php) ## SDK Repos - [Messaging API - PHP SDK](https://github.com/telstra/MessagingAPI-SDK-php) - [Messaging API - Python SDK](https://github.com/telstra/MessagingAPI-SDK-python) - [Messaging API - Ruby SDK](https://github.com/telstra/MessagingAPI-SDK-ruby) - [Messaging API - NodeJS SDK](https://github.com/telstra/MessagingAPI-SDK-node) - [Messaging API - .Net2 SDK](https://github.com/telstra/MessagingAPI-SDK-dotnet) - [Messaging API - Java SDK](https://github.com/telstra/MessagingAPI-SDK-Java) ## Blog Posts For more information on the Messaging API, you can read these blog posts: - [Callbacks Part 1](https://dev.telstra.com/content/understanding-messaging-api-callbacks-part-1) - [Callbacks Part 2](https://dev.telstra.com/content/understanding-messaging-api-callbacks-part-2) # noqa: E501
OpenAPI spec version: 2.2.9
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
def __init__(cls, name, bases, dct):
super(TypeWithDefault, cls).__init__(name, bases, dct)
cls._default = None
def __call__(cls):
if cls._default is None:
cls._default = type.__call__(cls)
return copy.copy(cls._default)
def set_default(cls, default):
cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self):
"""Constructor"""
# Default Base url
self.host = "https://tapi.telstra.com/v2"
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# access token for OAuth
self.access_token = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("Telstra_Messaging")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API
# from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
# Set this to True/False to enable/disable SSL hostname verification.
self.assert_hostname = None
# urllib3 connection pool's maximum number of connections saved
# per pool. urllib3 uses 1 connection as default value, but this is
# not the best value when you are making a lot of possibly parallel
# requests to the same host, which is often the case here.
# cpu_count * 5 is used as default value to increase performance.
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
# Proxy URL
self.proxy = None
# Safe chars for path_param
self.safe_chars_for_path_param = ''
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if (self.api_key.get(identifier) and
self.api_key_prefix.get(identifier)):
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501
elif self.api_key.get(identifier):
return self.api_key[identifier]
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
'auth':
{
'type': 'oauth2',
'in': 'header',
'key': 'Authorization',
'value': 'Bearer ' + self.access_token
},
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 2.2.9\n"\
"SDK Package Version: 1.0.6".\
format(env=sys.platform, pyversion=sys.version)
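# Usage sketch (illustrative addition, not part of the generated file). Only
# the Configuration class defined above is exercised. Because of the
# TypeWithDefault metaclass, Configuration() always returns a copy of the
# stored default, so set_default() is what makes a customised instance visible
# to later callers; the token value below is hypothetical.
if __name__ == "__main__":
    cfg = Configuration()                        # copy of the lazily created default
    cfg.access_token = "example-oauth2-token"    # hypothetical OAuth token
    cfg.debug = True                             # switches package/urllib3 loggers to DEBUG
    Configuration.set_default(cfg)               # store this instance as the new default
    assert Configuration().access_token == "example-oauth2-token"
    print(Configuration().to_debug_report())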
| 73.495833
| 10,226
| 0.704122
| 2,430
| 17,639
| 5.053498
| 0.25679
| 0.017915
| 0.010261
| 0.011726
| 0.200489
| 0.15171
| 0.128583
| 0.095684
| 0.083713
| 0.083713
| 0
| 0.016659
| 0.217303
| 17,639
| 239
| 10,227
| 73.803347
| 0.872809
| 0.729803
| 0
| 0.058824
| 0
| 0
| 0.069332
| 0
| 0
| 0
| 0
| 0
| 0.009804
| 1
| 0.137255
| false
| 0.019608
| 0.078431
| 0
| 0.323529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d0a7c3be5b482ff8c7670f19fff95f3a54a4face
| 5,854
|
py
|
Python
|
tawsocks/tcp_relay.py
|
comeacrossyun/skyun
|
96ef2e98da2826863850c8b917bf1cba8a8a796b
|
[
"MIT"
] | 1
|
2021-05-23T15:50:25.000Z
|
2021-05-23T15:50:25.000Z
|
tawsocks/tcp_relay.py
|
comeacrossyun/skyun
|
96ef2e98da2826863850c8b917bf1cba8a8a796b
|
[
"MIT"
] | null | null | null |
tawsocks/tcp_relay.py
|
comeacrossyun/skyun
|
96ef2e98da2826863850c8b917bf1cba8a8a796b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
# @Time : 2019-06-04
# @Author : hongshu
import sys
import asyncio
from tawsocks import common
class TcpRelayHandler(object):
def __init__(self, is_client, config, loop):
self.is_client = is_client
self.config = config
self.loop = loop
async def start(self):
await self._listening()
async def _listening(self):
if self.is_client:
await asyncio.start_server(self._shake_hand, '0.0.0.0', self.config.client_port, loop=self.loop)
else:
await asyncio.start_server(self._establish_connection, '0.0.0.0', self.config.server_port, loop=self.loop)
async def _shake_hand(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
data = await common.read_data(reader, False, self.config.password)
if len(data) < 2 or len(data) != 2 + data[1] or data[0] != 0x05:
self._shake_hand_fail(writer)
writer.close()
return
# Check whether the client accepts the "no authentication" method
if 0x00 not in data[2:]:
self._shake_hand_fail(writer)
writer.close()
return
self._shake_hand_success(writer)
await self._establish_connection(reader, writer)
def _shake_hand_success(self, writer):
common.write_data(writer, b'\x05\x00', False, self.config.password)
def _shake_hand_fail(self, writer):
common.write_data(writer, b'\x05\xff', False, self.config.password)
async def _establish_connection(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
if self.is_client:
data = await common.read_data(reader, False, self.config.password)
try:
remote_reader, remote_writer = \
await asyncio.open_connection(self.config.server_host, self.config.server_port, loop=self.loop)
except:
print('cannot connect to proxy server')
self._establish_connection_fail(writer, 0x04)
writer.close()
return
common.write_data(remote_writer, data, True, self.config.password)
data = await common.read_data(remote_reader, True, self.config.password)
if data[1] == 0x00:
self._establish_connection_success(writer)
else:
self._establish_connection_fail(writer, data[1])
writer.close()
return
await self._transfer_data(reader, writer, remote_reader, remote_writer)
else:
data = await common.read_data(reader, True, self.config.password)
if data[0] != 0x05 or data[2] != 0x00:
self._establish_connection_fail(writer, 0x02)
writer.close()
return
# Only TCP and UDP are supported
if data[1] == 0x01: # TCP
pass
elif data[1] == 0x03: # UDP
self._establish_connection_success(writer)
return
else:
self._establish_connection_fail(writer, 0x07)
writer.close()
return
if data[3] == 0x01: # IPv4
remote_host = '%d.%d.%d.%d' % (int(data[4]), int(data[5]), int(data[6]), int(data[7]))
elif data[3] == 0x03: # domain name
remote_host = str(data[5: -2], encoding='utf-8')
elif data[3] == 0x04: # IPv6
self._establish_connection_fail(writer, 0x08)
writer.close()
return
else:
self._establish_connection_fail(writer, 0x02)
writer.close()
return
remote_port = int.from_bytes(bytes=data[-2:], byteorder='big')
print("remote host: %s:%s" % (remote_host, remote_port))
try:
remote_reader, remote_writer = await asyncio.open_connection(remote_host, remote_port, loop=self.loop)
except:
print('failed to connect to %s' % remote_host, file=sys.stderr)
self._establish_connection_fail(writer, 0x04)
writer.close()
return
self._establish_connection_success(writer)
await self._transfer_data(reader, writer, remote_reader, remote_writer)
def _establish_connection_success(self, writer):
if self.is_client:
data = bytes([0x05, 0x00, 0x00, 0x01, 0, 0, 0, 0])
data += common.convert_port_to_bytes(self.config.client_port)
common.write_data(writer, data, False, self.config.password)
else:
data = bytes([0x05, 0x00])
common.write_data(writer, data, True, self.config.password)
def _establish_connection_fail(self, writer, error_code):
if self.is_client:
data = bytes([0x05, error_code, 0x00, 0x01, 0, 0, 0, 0])
data += common.convert_port_to_bytes(self.config.client_port)
common.write_data(writer, data, False, self.config.password)
else:
data = bytes([0x05, error_code])
common.write_data(writer, data, True, self.config.password)
async def _transfer_data(self, reader, writer, remote_reader, remote_writer):
if self.is_client:
await asyncio.gather(
common.transfer_data_with_encrypt(reader, remote_writer, self.config.password),
common.transfer_data_with_decrypt(remote_reader, writer, self.config.password),
loop=self.loop
)
else:
await asyncio.gather(
common.transfer_data_with_decrypt(reader, remote_writer, self.config.password),
common.transfer_data_with_encrypt(remote_reader, writer, self.config.password),
loop=self.loop
)
| 39.823129
| 118
| 0.591561
| 680
| 5,854
| 4.875
| 0.173529
| 0.066365
| 0.081448
| 0.057014
| 0.670588
| 0.5819
| 0.504676
| 0.375867
| 0.330618
| 0.176169
| 0
| 0.03284
| 0.308165
| 5,854
| 146
| 119
| 40.09589
| 0.785679
| 0.021353
| 0
| 0.5
| 0
| 0
| 0.020112
| 0
| 0
| 0
| 0.018188
| 0
| 0
| 1
| 0.042373
| false
| 0.135593
| 0.025424
| 0
| 0.161017
| 0.025424
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d0af8ccc38db80b7705a16b0b92de3ffc09909b1
| 321
|
py
|
Python
|
submissions/arc068/b.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 1
|
2021-05-10T01:16:28.000Z
|
2021-05-10T01:16:28.000Z
|
submissions/arc068/b.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 3
|
2021-05-11T06:14:15.000Z
|
2021-06-19T08:18:36.000Z
|
submissions/arc068/b.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | null | null | null |
import sys
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)
from collections import Counter
n, *a = map(int, read().split())
counter = Counter(a).values()
ans = len(counter)
if (sum(counter) - ans) % 2 == 1:
ans -= 1
print(ans)
| 21.4
| 38
| 0.697819
| 48
| 321
| 4.666667
| 0.541667
| 0.107143
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021898
| 0.146417
| 321
| 14
| 39
| 22.928571
| 0.79562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.083333
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d0d550ba7652a9b60f892093b2e1479dc926d08c
| 751
|
py
|
Python
|
venv/lib/python2.7/dist-packages/landscape/lib/fd.py
|
pengwu/scapy_env
|
3db9c5dea2e219048a2387649d6d89be342903d9
|
[
"MIT"
] | null | null | null |
venv/lib/python2.7/dist-packages/landscape/lib/fd.py
|
pengwu/scapy_env
|
3db9c5dea2e219048a2387649d6d89be342903d9
|
[
"MIT"
] | null | null | null |
venv/lib/python2.7/dist-packages/landscape/lib/fd.py
|
pengwu/scapy_env
|
3db9c5dea2e219048a2387649d6d89be342903d9
|
[
"MIT"
] | null | null | null |
"""A utility module which has FD-related functions.
This module mostly exists for L{clean_fds}, so it can be imported without
accidentally getting a reactor or something else that might create a critical
file descriptor.
"""
import os
import resource
def clean_fds():
"""Close all non-stdio file descriptors.
This should be called at the beginning of a program to avoid inheriting any
unwanted file descriptors from the invoking process. Unfortunately, this
is really common in unix!
"""
rlimit_nofile = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
total_descriptors = min(4096, rlimit_nofile)
for fd in range(3, total_descriptors):
try:
os.close(fd)
except OSError:
pass
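# Usage sketch (illustrative addition, not part of the original module): per
# the docstring above, clean_fds() is meant to run at the very start of a
# program, before any descriptors worth keeping are opened, so a daemon entry
# point might begin like this ("run_daemon" is a hypothetical placeholder).
if __name__ == "__main__":
    clean_fds()  # closes fds 3 up to min(4096, RLIMIT_NOFILE), keeping stdio
    # ... only now open sockets, log files, etc., e.g. run_daemon() ...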
| 28.884615
| 79
| 0.713715
| 105
| 751
| 5.038095
| 0.72381
| 0.068053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.225033
| 751
| 25
| 80
| 30.04
| 0.898625
| 0.576565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0.1
| 0.2
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d0e54036779246dea8bdd23ebf8e7a5ba24254b9
| 1,054
|
py
|
Python
|
debpkgr/compat.py
|
sassoftware/python-debpkgr
|
220d57b461c2f323a30fb44b2d1126ca4a0f9ea6
|
[
"Apache-2.0"
] | 7
|
2017-03-09T11:28:42.000Z
|
2019-10-26T02:12:09.000Z
|
debpkgr/compat.py
|
sassoftware/python-debpkgr
|
220d57b461c2f323a30fb44b2d1126ca4a0f9ea6
|
[
"Apache-2.0"
] | 12
|
2017-03-24T07:45:41.000Z
|
2019-12-20T15:44:11.000Z
|
debpkgr/compat.py
|
sassoftware/python-debpkgr
|
220d57b461c2f323a30fb44b2d1126ca4a0f9ea6
|
[
"Apache-2.0"
] | 5
|
2017-03-09T11:28:15.000Z
|
2021-02-18T13:14:34.000Z
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# flake8: noqa
from six import (
add_metaclass,
iteritems,
raise_from,
string_types,
text_type,
)
from six.moves import configparser
from six.moves.reprlib import Repr
from six.moves.urllib.parse import parse_qs, urlsplit, urlunsplit
from six.moves.urllib.parse import urlparse, urlencode
from six.moves.urllib.request import urlopen, urlretrieve
from six.moves.urllib.error import HTTPError
try:
maketrans = str.maketrans
except AttributeError:
from string import maketrans
| 30.114286
| 74
| 0.766603
| 154
| 1,054
| 5.214286
| 0.597403
| 0.061021
| 0.089664
| 0.089664
| 0.072229
| 0.072229
| 0
| 0
| 0
| 0
| 0
| 0.005701
| 0.167932
| 1,054
| 34
| 75
| 31
| 0.90992
| 0.506641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.470588
| 0
| 0.470588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
d0e8204ae150a3e8c57ae24fe1a684bdf4ee48d0
| 4,650
|
py
|
Python
|
utils/config.py
|
ebadawy/JointModeling
|
5140e596113a6dabbc503a1fb1a3234efabf0f0b
|
[
"Apache-2.0"
] | null | null | null |
utils/config.py
|
ebadawy/JointModeling
|
5140e596113a6dabbc503a1fb1a3234efabf0f0b
|
[
"Apache-2.0"
] | null | null | null |
utils/config.py
|
ebadawy/JointModeling
|
5140e596113a6dabbc503a1fb1a3234efabf0f0b
|
[
"Apache-2.0"
] | 1
|
2020-04-11T09:40:17.000Z
|
2020-04-11T09:40:17.000Z
|
import json
from bunch import Bunch
import os
def get_config_from_json(json_file):
"""
Get the config from a json file
:param json_file:
:return: config(namespace) or config(dictionary)
"""
# parse the configurations from the config json file provided
with open(json_file, 'r') as config_file:
config_dict = json.load(config_file)
# convert the dictionary to a namespace using bunch lib
config = Bunch(config_dict)
config = default_values(config)
return config, config_dict
def process_config(jsonfile):
config, _ = get_config_from_json(jsonfile)
config.summary_dir = os.path.join("../experiments", config.exp_name, "summary")
config.checkpoint_dir = os.path.join("../experiments", config.exp_name, "checkpoint")
return config
def default_values(config):
# Fill in defaults for any keys missing from the JSON config.
config.setdefault('target_cluster', -1)
config.setdefault('rater_id', -1)
config.setdefault('gt_priors', False)
config.setdefault('priors', False)
config.setdefault('reg', False)
config.setdefault('modified_CE', False)
config.setdefault('ccc_err', False)
config.setdefault('rmse_weights', 1)
config.setdefault('cccerr_weights', 1)
config.setdefault('yout_weights', 1)
config.setdefault('alpha1', 1)
config.setdefault('alpha2', 1)
config.setdefault('fcs_num', 0)
config.setdefault('n_fc', 16)
config.setdefault('fc_act', 'tanh')
config.setdefault('fc_path', 0)
config.setdefault('clf_bias', 0)
config.setdefault('audio_video_feat', 0)
config.setdefault('gt', 'onehot')
config.setdefault('ccc_diff', -0.01)
config.setdefault('reset_lr', True)
config.setdefault('stage2', 0)
config.setdefault('max_to_keep', 1000)
config.setdefault('subset', 'joint_modling')
config.setdefault('log_dir', 'logs')
config.setdefault('max_length', 7500)
config.setdefault('sequence_length', 7500)
config.setdefault('learning_rate', 0.02)
config.setdefault('num_epochs', 20)
return config
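A hedged usage sketch of the helpers above. The file name and config keys below are hypothetical; only exp_name is actually required by process_config, and the bunch package must be installed.
import json

with open('example_config.json', 'w') as fh:   # hypothetical file
    json.dump({'exp_name': 'demo_run', 'num_epochs': 5}, fh)

config = process_config('example_config.json')
print(config.summary_dir)    # ../experiments/demo_run/summary
print(config.num_epochs)     # 5    (explicit value wins over the default of 20)
print(config.learning_rate)  # 0.02 (filled in by default_values)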
| 73.809524
| 133
| 0.529892
| 551
| 4,650
| 4.297641
| 0.183303
| 0.063345
| 0.152027
| 0.202703
| 0.356841
| 0.198902
| 0.070946
| 0.070946
| 0.039696
| 0.039696
| 0
| 0.01479
| 0.360215
| 4,650
| 62
| 134
| 75
| 0.781176
| 0.045806
| 0
| 0.043478
| 0
| 0
| 0.19243
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065217
| false
| 0
| 0.065217
| 0
| 0.195652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d0f021bab54a09a9592e986f398fbcf5edaf9bb8
| 1,059
|
py
|
Python
|
backent/api/migrations/0010_populate_event_tags.py
|
namafutatsu/backent
|
9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d
|
[
"MIT"
] | null | null | null |
backent/api/migrations/0010_populate_event_tags.py
|
namafutatsu/backent
|
9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d
|
[
"MIT"
] | null | null | null |
backent/api/migrations/0010_populate_event_tags.py
|
namafutatsu/backent
|
9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d
|
[
"MIT"
] | null | null | null |
from django.db import migrations
from backent.api import enums
def populate_tags(apps, schema_editor):
EventTag = apps.get_model('backent_api', 'EventTag')
EventTag.objects.get_or_create(name=enums.EVENT_TAG_BEGINNER_FRIENDLY)
EventTag.objects.get_or_create(name=enums.EVENT_TAG_INTERNATIONAL)
EventTag.objects.get_or_create(name=enums.EVENT_TAG_PWD_FRIENDLY)
EventTag.objects.get_or_create(name=enums.EVENT_TAG_UNDERAGE_FRIENDLY)
def unpopulate_tags(apps, schema_editor):
EventTag = apps.get_model('backent_api', 'EventTag')
EventTag.objects.filter(name=enums.EVENT_TAG_BEGINNER_FRIENDLY).delete()
EventTag.objects.filter(name=enums.EVENT_TAG_INTERNATIONAL).delete()
EventTag.objects.filter(name=enums.EVENT_TAG_PWD_FRIENDLY).delete()
EventTag.objects.filter(name=enums.EVENT_TAG_UNDERAGE_FRIENDLY).delete()
class Migration(migrations.Migration):
dependencies = [
('backent_api', '0009_add_event_tags'),
]
operations = [
migrations.RunPython(populate_tags, unpopulate_tags),
]
| 34.16129
| 76
| 0.777148
| 136
| 1,059
| 5.727941
| 0.279412
| 0.154044
| 0.143774
| 0.174583
| 0.731707
| 0.698331
| 0.629012
| 0.599487
| 0.543004
| 0.318357
| 0
| 0.004283
| 0.118036
| 1,059
| 30
| 77
| 35.3
| 0.829764
| 0
| 0
| 0.095238
| 0
| 0
| 0.064212
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.095238
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d0f9d9924cd74ed348272e7fd7ebf2c3d8c9e835
| 2,844
|
py
|
Python
|
sqlpuzzle/_queries/selectoptions.py
|
Dundee/python-sqlpuzzle
|
260524922a0645c9bf94a9779195f93ef2c78cba
|
[
"MIT"
] | 8
|
2015-03-19T11:25:32.000Z
|
2020-09-02T11:30:10.000Z
|
sqlpuzzle/_queries/selectoptions.py
|
Dundee/python-sqlpuzzle
|
260524922a0645c9bf94a9779195f93ef2c78cba
|
[
"MIT"
] | 7
|
2015-03-23T14:34:28.000Z
|
2022-02-21T12:36:01.000Z
|
sqlpuzzle/_queries/selectoptions.py
|
Dundee/python-sqlpuzzle
|
260524922a0645c9bf94a9779195f93ef2c78cba
|
[
"MIT"
] | 4
|
2018-11-28T21:59:27.000Z
|
2020-01-05T01:50:08.000Z
|
from sqlpuzzle._common import Object
from sqlpuzzle._queries.options import Options
__all__ = ()
class SelectOptions(Options):
_definition_of_options = {
'sql_cache': {
'off': '',
'cache': 'SQL_CACHE',
'no_cache': 'SQL_NO_CACHE'
},
'duplicated': {
'off': '',
'all': 'ALL',
'distinct': 'DISTINCT',
'distinctrow': 'DISTINCTROW',
},
'sql_small_result': {
'off': '',
'on': 'SQL_SMALL_RESULT',
},
'sql_big_result': {
'off': '',
'on': 'SQL_BIG_RESULT',
},
'sql_buffer_result': {
'off': '',
'on': 'SQL_BUFFER_RESULT',
},
'sql_calc_found_rows': {
'off': '',
'on': 'SQL_CALC_FOUND_ROWS',
},
'straight_join': {
'off': '',
'on': 'STRAIGHT_JOIN',
},
'high_priority': {
'off': '',
'on': 'HIGH_PRIORITY',
},
}
def sql_cache(self, allow=True):
self._options['sql_cache'] = 'cache' if allow else 'off'
def sql_no_cache(self, allow=True):
self._options['sql_cache'] = 'no_cache' if allow else 'off'
def all(self, allow=True):
self._options['duplicated'] = 'all' if allow else 'off'
def distinct(self, allow=True):
self._options['duplicated'] = 'distinct' if allow else 'off'
def distinctrow(self, allow=True):
self._options['duplicated'] = 'distinctrow' if allow else 'off'
def sql_small_result(self, allow=True):
self._options['sql_small_result'] = 'on' if allow else 'off'
def sql_big_result(self, allow=True):
self._options['sql_big_result'] = 'on' if allow else 'off'
def sql_buffer_result(self, allow=True):
self._options['sql_buffer_result'] = 'on' if allow else 'off'
def sql_calc_found_rows(self, allow=True):
self._options['sql_calc_found_rows'] = 'on' if allow else 'off'
def straight_join(self, allow=True):
self._options['straight_join'] = 'on' if allow else 'off'
def high_priority(self, allow=True):
self._options['high_priority'] = 'on' if allow else 'off'
class SelectForUpdate(Object):
def __init__(self):
super().__init__()
self._for_update = False
def __str__(self):
if self._for_update:
return 'FOR UPDATE'
return ''
def __eq__(self, other):
return (
type(self) == type(other)
and self._for_update == other._for_update
)
@property
def is_set(self):
return self._for_update
def has(self, value):
return hasattr(self, value)
def for_update(self, allow=True):
self._for_update = bool(allow)
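A hedged usage sketch of the option classes above. How the enabled flags end up in the generated SELECT depends on the Options base class, which is not shown here, so the comments only describe the internal state changes.
options = SelectOptions()
options.distinct()              # _options['duplicated'] becomes 'distinct' -> DISTINCT
options.sql_calc_found_rows()   # _options['sql_calc_found_rows'] becomes 'on'
options.distinct(False)         # back to 'off'

for_update = SelectForUpdate()
for_update.for_update()
print(for_update)               # FOR UPDATE
print(for_update.is_set)        # True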
| 27.085714
| 71
| 0.54782
| 321
| 2,844
| 4.52648
| 0.174455
| 0.074329
| 0.107364
| 0.140399
| 0.405368
| 0.337922
| 0.176875
| 0.108741
| 0
| 0
| 0
| 0
| 0.312588
| 2,844
| 104
| 72
| 27.346154
| 0.743223
| 0
| 0
| 0.096386
| 0
| 0
| 0.192335
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.204819
| false
| 0
| 0.024096
| 0.036145
| 0.325301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
190383e67ecd4d49f6a52f77fa42e3e6a18c204f
| 4,197
|
py
|
Python
|
misc/openstack-dev.py
|
tnoff/OpenDerp
|
44f1e5c2027a2949b785941044a8503a34423228
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
misc/openstack-dev.py
|
tnoff/OpenDerp
|
44f1e5c2027a2949b785941044a8503a34423228
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
misc/openstack-dev.py
|
tnoff/OpenDerp
|
44f1e5c2027a2949b785941044a8503a34423228
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
#!/usr/bin/env python
import argparse
import boto
from boto.s3 import connection as s3_connection
from cinderclient.v1 import client as cinder_v1
import code
from novaclient.v1_1 import client as nova_v1
from novaclient.shell import OpenStackComputeShell as open_shell
from glanceclient import Client as glance_client
from keystoneclient.v2_0 import client as key_v2
from neutronclient.v2_0 import client as neutron_v2
import os
import swiftclient
import sys
from urlparse import urlparse
def parse_args():
a = argparse.ArgumentParser(description='Give me the api clients')
a.add_argument('--username', help='Auth username')
a.add_argument('--password', help='Auth password')
a.add_argument('--tenant-name', help='Auth tenant name')
a.add_argument('--auth-url', help='Auth url')
a.add_argument('--ca-cert', help='Ca cert file')
return a.parse_args()
def get_env(args):
if not args['username']:
args['username'] = os.getenv('OS_USERNAME', None)
if not args['password']:
args['password'] = os.getenv('OS_PASSWORD', None)
if not args['tenant_name']:
args['tenant_name'] = os.getenv('OS_TENANT_NAME', None)
if not args['auth_url']:
args['auth_url'] = os.getenv('OS_AUTH_URL', None)
if not args['ca_cert']:
args['ca_cert'] = os.getenv('OS_CACERT')
# Check for args
must_have = ['username', 'password', 'tenant_name', 'auth_url']
for item in must_have:
if args[item] is None:
sys.exit("Don't have:%s, exiting" % item)
return args
def main():
args = vars(parse_args())
args = get_env(args)
extensions = open_shell()._discover_extensions("1.1")
nova = nova_v1.Client(args['username'],
args['password'],
args['tenant_name'],
args['auth_url'],
extensions=extensions,
cacert=args['ca_cert'])
keystone = key_v2.Client(username=args['username'],
password=args['password'],
tenant_name=args['tenant_name'],
auth_url=args['auth_url'],
cacert=args['ca_cert'],)
neutron = neutron_v2.Client(username=args['username'],
password=args['password'],
tenant_name=args['tenant_name'],
auth_url=args['auth_url'],
cacert=args['ca_cert'],)
cinder = cinder_v1.Client(args['username'],
args['password'],
args['tenant_name'],
args['auth_url'],
cacert=args['ca_cert'],)
swift = swiftclient.client.Connection(auth_version='2',
user=args['username'],
key=args['password'],
tenant_name=args['tenant_name'],
authurl=args['auth_url'])
token = keystone.auth_token
service_catalog = keystone.service_catalog
catalog = service_catalog.catalog['serviceCatalog']
glance_ip = None
for endpoint in catalog:
if 'image' == endpoint['type']:
glance_ip = endpoint['endpoints'][0]['publicURL']
glance = glance_client('1', endpoint=glance_ip, token=token)
creds = keystone.ec2.list(keystone.user_id)
if len(creds) == 0:
keystone.ec2.create(keystone.user_id, keystone.tenant_id)
creds = keystone.ec2.list(keystone.user_id)
cred = creds[-1]
s3_url = urlparse(keystone.service_catalog.url_for(service_type='object-store'))
host, port = s3_url.netloc.split(':')
s3 = boto.connect_s3(aws_access_key_id=cred.access,
aws_secret_access_key=cred.secret,
host=host,
port=int(port),
is_secure=False,
calling_format=s3_connection.OrdinaryCallingFormat())
code.interact(local=locals())
if __name__ == '__main__':
main()
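For context, a small hedged sketch of the CLI-then-environment fallback that get_env() implements; the helper name below is hypothetical.
import os

def value_or_env(cli_value, env_name):
    # Prefer an explicit command-line value, otherwise fall back to the environment.
    return cli_value if cli_value else os.getenv(env_name)

username = value_or_env(None, 'OS_USERNAME')   # taken from the environment if set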
| 41.97
| 84
| 0.571122
| 480
| 4,197
| 4.7875
| 0.254167
| 0.060923
| 0.042646
| 0.022628
| 0.227154
| 0.194082
| 0.194082
| 0.141862
| 0.141862
| 0.141862
| 0
| 0.011046
| 0.309745
| 4,197
| 99
| 85
| 42.393939
| 0.782188
| 0.008339
| 0
| 0.195652
| 0
| 0
| 0.145192
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032609
| false
| 0.097826
| 0.152174
| 0
| 0.206522
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
1907beae999c84a846e911c9160f122031a33418
| 3,046
|
py
|
Python
|
tools/perf/contrib/cluster_telemetry/screenshot_unittest.py
|
zipated/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
tools/perf/contrib/cluster_telemetry/screenshot_unittest.py
|
cangulcan/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
tools/perf/contrib/cluster_telemetry/screenshot_unittest.py
|
cangulcan/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import tempfile
from telemetry import decorators
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
from telemetry.util import image_util
from contrib.cluster_telemetry import screenshot
class ScreenshotUnitTest(page_test_test_case.PageTestTestCase):
def setUp(self):
self._options = options_for_unittests.GetCopy()
self._png_outdir = tempfile.mkdtemp('_png_test')
def tearDown(self):
shutil.rmtree(self._png_outdir)
@decorators.Enabled('linux')
def testScreenshot(self):
# Screenshots for Cluster Telemetry purposes are currently only supported on
# the Linux platform.
page_set = self.CreateStorySetFromFileInUnittestDataDir(
'screenshot_test.html')
measurement = screenshot.Screenshot(self._png_outdir)
self.RunMeasurement(measurement, page_set, options=self._options)
path = self._png_outdir + '/' + page_set.stories[0].file_safe_name + '.png'
self.assertTrue(os.path.exists(path))
self.assertTrue(os.path.isfile(path))
self.assertTrue(os.access(path, os.R_OK))
image = image_util.FromPngFile(path)
screenshot_pixels = image_util.Pixels(image)
special_colored_pixel = bytearray([217, 115, 43])
self.assertTrue(special_colored_pixel in screenshot_pixels)
@decorators.Enabled('linux')
def testIsScreenshotWithinDynamicContentThreshold(self):
# TODO(lchoi): This unit test fails on Windows due to an apparent platform
# dependent image decoding behavior that will need to be investigated in the
# future if Cluster Telemetry ever becomes compatible with Windows.
width = 2
height = 1
num_total_pixels = width * height
content_pixels = bytearray([0, 0, 0, 128, 128, 128])
base_screenshot = image_util.FromRGBPixels(width, height, content_pixels)
next_pixels = bytearray([1, 1, 1, 128, 128, 128])
next_screenshot = image_util.FromRGBPixels(width, height, next_pixels)
expected_pixels = bytearray([0, 255, 255, 128, 128, 128])
self.assertTrue(screenshot.IsScreenshotWithinDynamicContentThreshold(
base_screenshot, next_screenshot, content_pixels,
num_total_pixels, 0.51))
self.assertTrue(expected_pixels == content_pixels)
next_pixels = bytearray([0, 0, 0, 1, 1, 1])
next_screenshot = image_util.FromRGBPixels(2, 1, next_pixels)
expected_pixels = bytearray([0, 255, 255, 0, 255, 255])
self.assertTrue(screenshot.IsScreenshotWithinDynamicContentThreshold(
base_screenshot, next_screenshot, content_pixels,
num_total_pixels, 0.51))
self.assertTrue(expected_pixels == content_pixels)
self.assertFalse(screenshot.IsScreenshotWithinDynamicContentThreshold(
base_screenshot, next_screenshot, content_pixels,
num_total_pixels, 0.49))
| 41.726027
| 80
| 0.738345
| 370
| 3,046
| 5.878378
| 0.354054
| 0.051494
| 0.023908
| 0.044138
| 0.327816
| 0.276322
| 0.236782
| 0.236782
| 0.2
| 0.2
| 0
| 0.034359
| 0.178267
| 3,046
| 72
| 81
| 42.305556
| 0.834598
| 0.149705
| 0
| 0.211538
| 0
| 0
| 0.017048
| 0
| 0
| 0
| 0
| 0.013889
| 0.173077
| 1
| 0.076923
| false
| 0
| 0.153846
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1908ece0bbcf8875b565e097e59669305dfcf236
| 354
|
py
|
Python
|
_aulas/ex004.py
|
CarlosJunn/Aprendendo_Python
|
cddb29b5ee2058c3fb612574eb4af414770b7422
|
[
"MIT"
] | null | null | null |
_aulas/ex004.py
|
CarlosJunn/Aprendendo_Python
|
cddb29b5ee2058c3fb612574eb4af414770b7422
|
[
"MIT"
] | null | null | null |
_aulas/ex004.py
|
CarlosJunn/Aprendendo_Python
|
cddb29b5ee2058c3fb612574eb4af414770b7422
|
[
"MIT"
] | null | null | null |
a = input('Type something: ')
print('The primitive type of this value is', type(a))
print('Only spaces?', a.isspace())
print('Is it numeric?', a.isnumeric())
print('Is it alphabetic?', a.isalpha())
print('Is it alphanumeric?', a.isalnum())
print('Is it in upper case?', a.isupper())
print('Is it in lower case?', a.islower())
print('Is it title-cased?', a.istitle())
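For reference, a hedged example of what these checks return for one hypothetical input (not part of the original exercise):
sample = 'Python3'
print(sample.isalnum())    # True  - only letters and digits
print(sample.isalpha())    # False - contains a digit
print(sample.isnumeric())  # False - contains letters
print(sample.istitle())    # True  - single leading capital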
| 35.4
| 50
| 0.675141
| 52
| 354
| 4.596154
| 0.634615
| 0.050209
| 0.09205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112994
| 354
| 10
| 51
| 35.4
| 0.761147
| 0
| 0
| 0
| 0
| 0
| 0.44507
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.888889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
190ab0b7b7eed8792f426c4ad62cea8612750811
| 3,966
|
py
|
Python
|
authentication/login.py
|
ICTKevinWong/webservices-samples
|
35a8b8571d88276ff12ad60959192ce20ef5bf19
|
[
"BSD-3-Clause"
] | 6
|
2018-01-03T14:13:57.000Z
|
2021-07-28T21:12:35.000Z
|
authentication/login.py
|
ICTKevinWong/webservices-samples
|
35a8b8571d88276ff12ad60959192ce20ef5bf19
|
[
"BSD-3-Clause"
] | 5
|
2018-01-03T15:28:47.000Z
|
2020-08-28T08:25:07.000Z
|
authentication/login.py
|
ICTKevinWong/webservices-samples
|
35a8b8571d88276ff12ad60959192ce20ef5bf19
|
[
"BSD-3-Clause"
] | 6
|
2017-10-17T19:37:44.000Z
|
2021-08-19T13:10:16.000Z
|
"""
Examples of authenticating to the API.
Usage:
login <username> <password> <server>
login -h
Arguments:
username ID to provide for authentication
password Password corresponding to specified userid.
server API endpoint.
Options:
-h --help Show this screen.
--version Show version.
Description:
There are two ways that you can authenticate to the Web Services API. Both options are viable and are demonstrated
below with examples.
Basic-Authentication is probably the most popular option, especially for shorter/simpler usages of the API, mostly
because of its simplicity. The credentials are simply provided with each request.
There is a login endpoint (POST /devmgr/utils/login), that will allow you to explicitly authenticate with the API.
Upon authenticating, a JSESSIONID will be provided in the Response headers and as a Cookie that can be utilized
to create a persistent session (that will eventually timeout).
"""
import logging
import docopt
import requests
LOG = logging.getLogger(__name__)
def login(server, username, password):
# Define a re-usable Session object and set some standard headers
con = requests.Session()
con.headers.update({'Accept': 'application/json', 'Content-Type': 'application/json'})
# Here we do a login that will define a persistent session on the server-side upon successful authentication
result = con.post(server + "/devmgr/utils/login", json={'userId': username, 'password': password})
# You'll notice the JSESSIONID as a part of the Response headers
LOG.info("Headers: %s", result.headers)
# Notice how the JSESSIONID is now set as a cookie on the Session object?
LOG.info("Cookie Set: JSESSIONID: %s", con.cookies.get('JSESSIONID'))
# Now we make a subsequent request to a different Resource. Notice how the JSESSIONID is persisted on the connection?
# Requests is intelligent enough to persist the cookie that is sent back in the Response on the requests.Session()!
result = con.get(server + "/devmgr/v2/storage-systems")
assert result.cookies.get('JSESSIONID') == con.cookies.get('JSESSIONID')
# Now let's avoid using a persistent session with the login
result1 = requests.post(server + "/devmgr/utils/login", json={'userId': username, 'password': password})
# Okay, now we have a different JSESSIONID, that's okay, that's what we expected.
assert result1.cookies.get('JSESSIONID') != con.cookies.get('JSESSIONID')
result2 = requests.get(server + "/devmgr/v2/storage-systems", auth=(username, password))
# Uh oh, we got an authentication error!?! That's because the JSESSIONID wasn't set on a persistent session,
# and we didn't use Basic-Auth to authenticate directly!
LOG.warn("Request without a session or auth: %s", result2.status_code)
# This time we'll provide credentials using Basic-Authentication
result2 = requests.get(server + "/devmgr/v2/storage-systems", auth=(username, password))
# It works, but we got a new session.
assert result1.cookies.get('JSESSIONID') != result2.cookies.get('JSESSIONID')
# We can do something similar to what requests does for us by manually persisting the cookie. This may be necessary
# for less full-featured clients.
result1 = requests.post(server + "/devmgr/utils/login", json={'userId': username, 'password': password})
result2 = requests.get(server + "/devmgr/v2/storage-systems", cookies=result1.cookies)
# See, they match, and we don't have to provide authentication for this request!
assert result1.cookies.get('JSESSIONID') == result2.cookies.get('JSESSIONID')
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG,
format='%(relativeCreated)dms %(levelname)s %(module)s.%(funcName)s:%(lineno)d\n %(message)s')
args = docopt.docopt(__doc__)
login(args.get('<server>'), args.get('<username>'), args.get('<password>'))
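Condensed into a hedged sketch, the two approaches described in the docstring look like this (example.invalid and the credentials are placeholders):
import requests

SERVER = 'https://example.invalid'   # hypothetical endpoint
AUTH = ('admin', 'secret')           # hypothetical credentials

# Option 1: Basic-Authentication supplied on every request.
r1 = requests.get(SERVER + '/devmgr/v2/storage-systems', auth=AUTH)

# Option 2: log in once on a Session; the JSESSIONID cookie persists afterwards.
session = requests.Session()
session.post(SERVER + '/devmgr/utils/login',
             json={'userId': AUTH[0], 'password': AUTH[1]})
r2 = session.get(SERVER + '/devmgr/v2/storage-systems')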
| 47.783133
| 120
| 0.72113
| 547
| 3,966
| 5.197441
| 0.369287
| 0.031657
| 0.063313
| 0.023918
| 0.234611
| 0.219838
| 0.208934
| 0.178684
| 0.162504
| 0.120295
| 0
| 0.0049
| 0.176752
| 3,966
| 82
| 121
| 48.365854
| 0.86585
| 0.539334
| 0
| 0.153846
| 0
| 0.038462
| 0.297507
| 0.089197
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.038462
| false
| 0.269231
| 0.115385
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
190b6799d93e741a949b082cb1fde511c62a4b57
| 487
|
py
|
Python
|
Chapter08/qt08_winBkground03.py
|
csy1993/PythonQt
|
c100cd9e1327fc7731bf04c7754cafb8dd578fa5
|
[
"Apache-2.0"
] | null | null | null |
Chapter08/qt08_winBkground03.py
|
csy1993/PythonQt
|
c100cd9e1327fc7731bf04c7754cafb8dd578fa5
|
[
"Apache-2.0"
] | null | null | null |
Chapter08/qt08_winBkground03.py
|
csy1993/PythonQt
|
c100cd9e1327fc7731bf04c7754cafb8dd578fa5
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
[Overview]
Setting the window background color
'''
from PyQt5.QtWidgets import QApplication, QLabel ,QWidget, QVBoxLayout , QPushButton, QMainWindow
from PyQt5.QtGui import QPalette , QBrush , QPixmap
from PyQt5.QtCore import Qt
import sys
app = QApplication(sys.argv)
win = QMainWindow()
win.setWindowTitle("Window background color setting")
win.resize(350, 250)
palette = QPalette()
palette.setColor(QPalette.Background , Qt.red )
win.setPalette(palette)
win.show()
sys.exit(app.exec_())
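An alternative hedged sketch: Qt style sheets can set the same solid background without touching QPalette (the window title below is a placeholder).
from PyQt5.QtWidgets import QApplication, QMainWindow
import sys

app = QApplication(sys.argv)
win = QMainWindow()
win.setWindowTitle('Background color demo')
win.resize(350, 250)
win.setStyleSheet('background-color: red;')   # style-sheet equivalent of the QPalette call
win.show()
sys.exit(app.exec_())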
| 20.291667
| 99
| 0.710472
| 59
| 487
| 5.847458
| 0.627119
| 0.078261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.158111
| 487
| 23
| 100
| 21.173913
| 0.817073
| 0.082136
| 0
| 0
| 0
| 0
| 0.018519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.307692
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
ef700e08b8631cf4f5d03872e7a2e1c13a5f31f4
| 50,478
|
py
|
Python
|
shwirl/shaders/render_volume.py
|
macrocosme/shwirl
|
87147ba1e99463e96b7f4295fd24ab57440d9981
|
[
"BSD-3-Clause"
] | 3
|
2018-05-09T17:55:53.000Z
|
2019-07-22T09:14:41.000Z
|
shwirl/shaders/render_volume.py
|
macrocosme/shwirl
|
87147ba1e99463e96b7f4295fd24ab57440d9981
|
[
"BSD-3-Clause"
] | 9
|
2017-04-07T01:44:15.000Z
|
2018-12-16T20:47:08.000Z
|
shwirl/shaders/render_volume.py
|
macrocosme/shwirl
|
87147ba1e99463e96b7f4295fd24ab57440d9981
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import division
# This file implements a RenderVolumeVisual class. It is derived from the
# VolumeVisual class in vispy.visuals.volume, which is released under a BSD
# license included here:
#
# ===========================================================================
# Vispy is licensed under the terms of the (new) BSD license:
#
# Copyright (c) 2015, authors of Vispy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Vispy Development Team nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ===========================================================================
#
# This modified version is released under the (new) BSD license:
#
# Copyright (c) 2015, Dany Vohl
# All rights reserved.
#
# A copy of the license is available in the root directory of this project.
#
from ..extern.vispy.gloo import Texture3D, TextureEmulated3D, VertexBuffer, IndexBuffer
from ..extern.vispy.visuals import Visual
from ..extern.vispy.visuals.shaders import Function
from ..extern.vispy.color import get_colormap
from ..extern.vispy.scene.visuals import create_visual_node
from ..extern.vispy.io import load_spatial_filters
import numpy as np
# Vertex shader
VERT_SHADER = """
attribute vec3 a_position;
// attribute vec3 a_texcoord;
uniform vec3 u_shape;
// varying vec3 v_texcoord;
varying vec3 v_position;
varying vec4 v_nearpos;
varying vec4 v_farpos;
void main() {
// v_texcoord = a_texcoord;
v_position = a_position;
// Project local vertex coordinate to camera position. Then do a step
// backward (in cam coords) and project back. Voila, we get our ray vector.
vec4 pos_in_cam = $viewtransformf(vec4(v_position, 1));
// intersection of ray and near clipping plane (z = -1 in clip coords)
pos_in_cam.z = -pos_in_cam.w;
v_nearpos = $viewtransformi(pos_in_cam);
// intersection of ray and far clipping plane (z = +1 in clip coords)
pos_in_cam.z = pos_in_cam.w;
v_farpos = $viewtransformi(pos_in_cam);
gl_Position = $transform(vec4(v_position, 1.0));
}
""" # noqa
# Fragment shader
FRAG_SHADER = """
// uniforms
uniform $sampler_type u_volumetex;
uniform vec3 u_shape;
uniform vec3 u_resolution;
uniform float u_threshold;
uniform float u_relative_step_size;
//uniform int u_color_scale;
//uniform float u_data_min;
//uniform float u_data_max;
// Moving box filter variables
uniform int u_filter_size;
uniform float u_filter_coeff;
uniform int u_filter_arm;
uniform int u_filter_type;
uniform int u_use_gaussian_filter;
uniform int u_gaussian_filter_size;
//uniform int u_log_scale;
// Volume Stats
uniform float u_volume_mean;
uniform float u_volume_std;
//uniform float u_volume_madfm;
uniform float u_high_discard_filter_value;
uniform float u_low_discard_filter_value;
uniform float u_density_factor;
uniform int u_color_method;
//varyings
// varying vec3 v_texcoord;
varying vec3 v_position;
varying vec4 v_nearpos;
varying vec4 v_farpos;
// uniforms for lighting. Hard coded until we figure out how to do lights
const vec4 u_ambient = vec4(0.2, 0.4, 0.2, 1.0);
const vec4 u_diffuse = vec4(0.8, 0.2, 0.2, 1.0);
const vec4 u_specular = vec4(1.0, 1.0, 1.0, 1.0);
const float u_shininess = 40.0;
//varying vec3 lightDirs[1];
// global holding view direction in local coordinates
vec3 view_ray;
float rand(vec2 co)
{{
// Create a pseudo-random number between 0 and 1.
// http://stackoverflow.com/questions/4200224
return fract(sin(dot(co.xy ,vec2(12.9898, 78.233))) * 43758.5453);
}}
float colorToVal(vec4 color1)
{{
return color1.g;
}}
vec4 movingAverageFilter_line_of_sight(vec3 loc, vec3 step)
{{
// Initialise variables
vec4 partial_color = vec4(0.0, 0.0, 0.0, 0.0);
for ( int i=1; i<=u_filter_arm; i++ )
{{
partial_color += $sample(u_volumetex, loc-i*step);
partial_color += $sample(u_volumetex, loc+i*step);
}}
partial_color += $sample(u_volumetex, loc);
// Evaluate mean
partial_color *= u_filter_coeff;
return partial_color;
}}
vec4 Gaussian_5(vec4 color_original, vec3 loc, vec3 direction) {{
vec4 color = vec4(0.0);
vec3 off1 = 1.3333333333333333 * direction;
color += color_original * 0.29411764705882354;
color += $sample(u_volumetex, loc + (off1 * u_resolution)) * 0.35294117647058826;
color += $sample(u_volumetex, loc - (off1 * u_resolution)) * 0.35294117647058826;
return color;
}}
vec4 Gaussian_9(vec4 color_original, vec3 loc, vec3 direction)
{{
vec4 color = vec4(0.0);
vec3 off1 = 1.3846153846 * direction;
vec3 off2 = 3.2307692308 * direction;
color += color_original * 0.2270270270;
color += $sample(u_volumetex, loc + (off1 * u_resolution)) * 0.3162162162;
color += $sample(u_volumetex, loc - (off1 * u_resolution)) * 0.3162162162;
color += $sample(u_volumetex, loc + (off2 * u_resolution)) * 0.0702702703;
color += $sample(u_volumetex, loc - (off2 * u_resolution)) * 0.0702702703;
return color;
}}
vec4 Gaussian_13(vec4 color_original, vec3 loc, vec3 direction) {{
vec4 color = vec4(0.0);
vec3 off1 = 1.411764705882353 * direction;
vec3 off2 = 3.2941176470588234 * direction;
vec3 off3 = 5.176470588235294 * direction;
color += color_original * 0.1964825501511404;
color += $sample(u_volumetex, loc + (off1 * u_resolution)) * 0.2969069646728344;
color += $sample(u_volumetex, loc - (off1 * u_resolution)) * 0.2969069646728344;
color += $sample(u_volumetex, loc + (off2 * u_resolution)) * 0.09447039785044732;
color += $sample(u_volumetex, loc - (off2 * u_resolution)) * 0.09447039785044732;
color += $sample(u_volumetex, loc + (off3 * u_resolution)) * 0.010381362401148057;
color += $sample(u_volumetex, loc - (off3 * u_resolution)) * 0.010381362401148057;
return color;
}}
// ----------------------------------------------------------------
// ----------------------------------------------------------------
// Edge detection Pass
// (adapted from https://www.shadertoy.com/view/MscSzf#)
// ----------------------------------------------------------------
float checkSame(vec4 center, vec4 sample, vec3 resolution) {{
vec2 centerNormal = center.xy;
float centerDepth = center.z;
vec2 sampleNormal = sample.xy;
float sampleDepth = sample.z;
vec2 sensitivity = (vec2(0.3, 1.5) * resolution.y / 50.0);
vec2 diffNormal = abs(centerNormal - sampleNormal) * sensitivity.x;
bool isSameNormal = (diffNormal.x + diffNormal.y) < 0.1;
float diffDepth = abs(centerDepth - sampleDepth) * sensitivity.y;
bool isSameDepth = diffDepth < 0.1;
return (isSameNormal && isSameDepth) ? 1.0 : 0.0;
}}
vec4 edge_detection(vec4 color_original, vec3 loc, vec3 step, vec3 resolution) {{
vec4 sample1 = $sample(u_volumetex, loc + (vec3(1., 1., 0.) / resolution));
vec4 sample2 = $sample(u_volumetex, loc + (vec3(-1., -1., 0.) / resolution));
vec4 sample3 = $sample(u_volumetex, loc + (vec3(-1., 1., 0.) / resolution));
vec4 sample4 = $sample(u_volumetex, loc + (vec3(1., -1., 0.) / resolution));
float edge = checkSame(sample1, sample2, resolution) *
checkSame(sample3, sample4, resolution);
return vec4(color_original.rgb, 1-edge);
}}
// ----------------------------------------------------------------
// ----------------------------------------------------------------
// Used with iso surface
vec4 calculateColor(vec4 betterColor, vec3 loc, vec3 step)
{{
// Calculate color by incorporating lighting
vec4 color1;
vec4 color2;
// View direction
vec3 V = normalize(view_ray);
// calculate normal vector from gradient
vec3 N; // normal
color1 = $sample( u_volumetex, loc+vec3(-step[0],0.0,0.0) );
color2 = $sample( u_volumetex, loc+vec3(step[0],0.0,0.0) );
N[0] = colorToVal(color1) - colorToVal(color2);
betterColor = max(max(color1, color2),betterColor);
color1 = $sample( u_volumetex, loc+vec3(0.0,-step[1],0.0) );
color2 = $sample( u_volumetex, loc+vec3(0.0,step[1],0.0) );
N[1] = colorToVal(color1) - colorToVal(color2);
betterColor = max(max(color1, color2),betterColor);
color1 = $sample( u_volumetex, loc+vec3(0.0,0.0,-step[2]) );
color2 = $sample( u_volumetex, loc+vec3(0.0,0.0,step[2]) );
N[2] = colorToVal(color1) - colorToVal(color2);
betterColor = max(max(color1, color2),betterColor);
float gm = length(N); // gradient magnitude
N = normalize(N);
// Flip normal so it points towards viewer
float Nselect = float(dot(N,V) > 0.0);
N = (2.0*Nselect - 1.0) * N; // == Nselect * N - (1.0-Nselect)*N;
// Get color of the texture (albedo)
color1 = betterColor;
color2 = color1;
// todo: parametrise color1_to_color2
// Init colors
vec4 ambient_color = vec4(0.0, 0.0, 0.0, 0.0);
vec4 diffuse_color = vec4(0.0, 0.0, 0.0, 0.0);
vec4 specular_color = vec4(0.0, 0.0, 0.0, 0.0);
vec4 final_color;
// todo: allow multiple lights, define lights on viewbox or subscene
int nlights = 1;
for (int i=0; i<nlights; i++)
{{
// Get light direction (make sure to prevent zero division)
vec3 L = normalize(view_ray); //lightDirs[i];
float lightEnabled = float( length(L) > 0.0 );
L = normalize(L+(1.0-lightEnabled));
// Calculate lighting properties
float lambertTerm = clamp( dot(N,L), 0.0, 1.0 );
vec3 H = normalize(L+V); // Halfway vector
float specularTerm = pow( max(dot(H,N),0.0), u_shininess);
// Calculate mask
float mask1 = lightEnabled;
// Calculate colors
ambient_color += mask1 * u_ambient; // * gl_LightSource[i].ambient;
diffuse_color += mask1 * lambertTerm;
specular_color += mask1 * specularTerm * u_specular;
}}
// Calculate final color by componing different components
final_color = color2 * ( ambient_color + diffuse_color) + specular_color;
final_color.a = color2.a;
// Done
return final_color;
}}
// for some reason, this has to be the last function in order for the
// filters to be inserted in the correct place...
void main() {{
vec3 farpos = v_farpos.xyz / v_farpos.w;
vec3 nearpos = v_nearpos.xyz / v_nearpos.w;
// Calculate unit vector pointing in the view direction through this
// fragment.
view_ray = normalize(farpos.xyz - nearpos.xyz);
// Compute the distance to the front surface or near clipping plane
float distance = dot(nearpos-v_position, view_ray);
distance = max(distance, min((-0.5 - v_position.x) / view_ray.x,
(u_shape.x - 0.5 - v_position.x) / view_ray.x));
distance = max(distance, min((-0.5 - v_position.y) / view_ray.y,
(u_shape.y - 0.5 - v_position.y) / view_ray.y));
//distance = max(distance, min((-0.5 - v_position.z) / view_ray.z,
// (u_shape.z - 0.5 - v_position.z) / view_ray.z));
// Now we have the starting position on the front surface
vec3 front = v_position + view_ray * distance;
// Decide how many steps to take
int nsteps = int(-distance / u_relative_step_size + 0.5);
if( nsteps < 1 )
discard;
// Get starting location and step vector in texture coordinates
vec3 step = ((v_position - front) / u_shape) / nsteps;
vec3 start_loc = front / u_shape;
// For testing: show the number of steps. This helps to establish
// whether the rays are correctly oriented
//gl_FragColor = vec4(0.0, nsteps / 3.0 / u_shape.x, 1.0, 1.0);
//return;
{before_loop}
vec3 loc = start_loc;
int iter = 0;
float discard_ratio = 1.0 / (u_high_discard_filter_value - u_low_discard_filter_value);
float low_discard_ratio = 1.0 / u_low_discard_filter_value;
for (iter=0; iter<nsteps; iter++)
{{
// Get sample color
vec4 color;
if (u_filter_size == 1)
color = $sample(u_volumetex, loc);
else {{
color = movingAverageFilter_line_of_sight(loc, step);
}}
if (u_use_gaussian_filter==1) {{
vec4 temp_color;
vec3 direction;
if (u_gaussian_filter_size == 5){{
// horizontal
direction = vec3(1., 0., 0.);
temp_color = Gaussian_5(color, loc, direction);
// vertical
direction = vec3(0., 1., 0.);
temp_color = Gaussian_5(temp_color, loc, direction);
// depth
direction = vec3(0., 0., 1.);
temp_color = Gaussian_5(temp_color, loc, direction);
}}
if (u_gaussian_filter_size == 9){{
// horizontal
direction = vec3(1., 0., 0.);
temp_color = Gaussian_9(color, loc, direction);
// vertical
direction = vec3(0., 1., 0.);
temp_color = Gaussian_9(temp_color, loc, direction);
// depth
direction = vec3(0., 0., 1.);
temp_color = Gaussian_9(temp_color, loc, direction);
}}
if (u_gaussian_filter_size == 13){{
// horizontal
direction = vec3(1., 0., 0.);
temp_color = Gaussian_13(color, loc, direction);
// vertical
direction = vec3(0., 1., 0.);
temp_color = Gaussian_13(temp_color, loc, direction);
// depth
direction = vec3(0., 0., 1.);
temp_color = Gaussian_13(temp_color, loc, direction);
}}
color = temp_color;
}}
float val = color.g;
// To force activating the uniform - this should be done differently
float density_factor = u_density_factor;
if (u_filter_type == 1) {{
// Get rid of very strong signal values
if (val > u_high_discard_filter_value)
{{
val = 0.;
}}
// Don't consider noisy values
//if (val < u_volume_mean - 3*u_volume_std)
if (val < u_low_discard_filter_value)
{{
val = 0.;
}}
if (u_low_discard_filter_value == u_high_discard_filter_value)
{{
if (u_low_discard_filter_value != 0.)
{{
val *= low_discard_ratio;
}}
}}
else {{
val -= u_low_discard_filter_value;
val *= discard_ratio;
}}
}}
else {{
if (val > u_high_discard_filter_value)
{{
val = 0.;
}}
if (val < u_low_discard_filter_value)
{{
val = 0.;
}}
}}
{in_loop}
// Advance location deeper into the volume
loc += step;
}}
{after_loop}
//gl_FragColor = edge_detection(gl_FragColor, loc, step, u_shape);
/* Set depth value - from visvis TODO
int iter_depth = int(maxi);
// Calculate end position in world coordinates
vec4 position2 = vertexPosition;
position2.xyz += ray*shape*float(iter_depth);
// Project to device coordinates and set fragment depth
vec4 iproj = gl_ModelViewProjectionMatrix * position2;
iproj.z /= iproj.w;
gl_FragDepth = (iproj.z+1.0)/2.0;
*/
}}
""" # noqa
MIP_SNIPPETS = dict(
before_loop="""
float maxval = -99999.0; // The maximum encountered value
int maxi = 0; // Where the maximum value was encountered
""",
in_loop="""
if( val > maxval ) {
maxval = val;
maxi = iter;
}
""",
after_loop="""
// Refine search for max value
loc = start_loc + step * (float(maxi) - 0.5);
for (int i=0; i<10; i++) {
maxval = max(maxval, $sample(u_volumetex, loc).g);
loc += step * 0.1;
}
if (maxval > u_high_discard_filter_value || maxval < u_low_discard_filter_value)
{{
maxval = 0.;
}}
// Color is associated to voxel intensity
// Moment 0
if (u_color_method == 0) {
gl_FragColor = $cmap(maxval);
}
// Moment 1
else if (u_color_method == 1) {
gl_FragColor = $cmap(loc.y);
gl_FragColor.a = maxval;
}
// Color is associated to RGB cube
else if (u_color_method == 2) {
gl_FragColor.r = loc.y;
gl_FragColor.g = loc.z;
gl_FragColor.b = loc.x;
gl_FragColor.a = maxval;
}
// Color by sigma values
else if (u_color_method == 3) {
if ( (maxval < (u_volume_mean + (3.0 * u_volume_std))) )
{
gl_FragColor = vec4(0., 0., 1., maxval);
}
// < 3 sigmas
if ( (maxval >= (u_volume_mean + (3.0 * u_volume_std))) &&
(maxval < (u_volume_mean + (4.0 * u_volume_std))) )
{
gl_FragColor = vec4(0., 1., 0., maxval);
}
if ( (maxval >= (u_volume_mean + (4.0 * u_volume_std))) &&
(maxval < (u_volume_mean + (5.0 * u_volume_std))) )
{
gl_FragColor = vec4(1., 0., 0., maxval);
}
if ( (maxval >= (u_volume_mean + (5.0 * u_volume_std))) )
{
gl_FragColor = vec4(1., 1., 1., maxval);
}
}
else {
// Moment 2
// TODO: verify implementation of MIP-mom2.
gl_FragColor = $cmap((maxval * ((maxval - loc.y) * (maxval - loc.y))) / maxval);
}
""",
)
MIP_FRAG_SHADER = FRAG_SHADER.format(**MIP_SNIPPETS)
LMIP_SNIPPETS = dict(
before_loop="""
float maxval = -99999.0; // The maximum encountered value
float local_maxval = -99999.0; // The local maximum encountered value
int maxi = 0; // Where the maximum value was encountered
int local_maxi = 0; // Where the local maximum value was encountered
bool local_max_found = false;
""",
in_loop="""
if( val > u_threshold && !local_max_found ) {
local_maxval = val;
local_maxi = iter;
local_max_found = true;
}
if( val > maxval) {
maxval = val;
maxi = iter;
}
""",
after_loop="""
if (!local_max_found) {
local_maxval = maxval;
local_maxi = maxi;
}
// Refine search for max value
loc = start_loc + step * (float(local_maxi) - 0.5);
for (int i=0; i<10; i++) {
local_maxval = max(local_maxval, $sample(u_volumetex, loc).g);
loc += step * 0.1;
}
if (local_maxval > u_high_discard_filter_value) {
local_maxval = 0.;
}
if (local_maxval < u_low_discard_filter_value) {
local_maxval = 0.;
}
// Color is associated to voxel intensity
if (u_color_method == 0) {
gl_FragColor = $cmap(local_maxval);
gl_FragColor.a = local_maxval;
}
// Color is associated to redshift/velocity
else {
gl_FragColor = $cmap(loc.y);
gl_FragColor.a = local_maxval;
}
""",
)
LMIP_FRAG_SHADER = FRAG_SHADER.format(**LMIP_SNIPPETS)
TRANSLUCENT_SNIPPETS = dict(
before_loop="""
vec4 integrated_color = vec4(0., 0., 0., 0.);
float mom0 = 0.;
float mom1 = 0.;
float ratio = 1/nsteps; // final average
float a1 = 0.;
float a2 = 0.;
""",
in_loop="""
float alpha;
// Case 1: Color is associated to voxel intensity
if (u_color_method == 0) {
/*color = $cmap(val);
a1 = integrated_color.a;
a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
integrated_color *= a1 / alpha;
integrated_color += color * a2 / alpha;*/
color = $cmap(val);
a1 = integrated_color.a;
a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
integrated_color *= a1 / alpha;
integrated_color += color * a2 / alpha;
}
else{
// Case 2: Color is associated to redshift/velocity
if (u_color_method == 1) {
color = $cmap(loc.y);
a1 = integrated_color.a;
a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
integrated_color *= a1 / alpha;
integrated_color.rgb += color.rgb * a2 / alpha;
}
// Case 3: Color is associated to RGB cube
else {
if (u_color_method == 2){
color.r = loc.y;
color.g = loc.z;
color.b = loc.x;
a1 = integrated_color.a;
a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
integrated_color *= a1 / alpha;
integrated_color.rgb += color.rgb * a2 / alpha;
}
// Case 4: Mom2
// TODO: Finish implementation of mom2 (not correct in its present form).
else {
// mom0
a1 = mom0;
a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
mom0 *= a1 / alpha;
mom0 += val * a2 / alpha;
// mom1
a1 = mom1;
a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
mom1 *= a1 / alpha;
mom1 += loc.y * a2 / alpha;
}
}
}
integrated_color.a = alpha;
// stop integrating if the fragment becomes opaque
if( alpha > 0.99 ){
iter = nsteps;
}
""",
after_loop="""
if (u_color_method != 3){
gl_FragColor = integrated_color;
}
else {
gl_FragColor = $cmap((mom0 * (mom0-mom1 * mom0-mom1)) / mom0);
}
""",
)
TRANSLUCENT_FRAG_SHADER = FRAG_SHADER.format(**TRANSLUCENT_SNIPPETS)
TRANSLUCENT2_SNIPPETS = dict(
before_loop="""
vec4 integrated_color = vec4(0., 0., 0., 0.);
float ratio = 1/nsteps; // final average
""",
in_loop="""
float alpha;
// Case 1: Color is associated to voxel intensity
if (u_color_method == 0) {
color = $cmap(val);
integrated_color = (val * density_factor + integrated_color.a * (1 - density_factor)) * color;
alpha = integrated_color.a;
//alpha = a1+a2;
// integrated_color *= a1 / alpha;
// integrated_color += color * a2 / alpha;
}
else{
// Case 2: Color is associated to redshift/velocity
if (u_color_method == 1) {
color = $cmap(loc.y);
float a1 = integrated_color.a;
float a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
integrated_color *= a1 / alpha;
integrated_color.rgb += color.rgb * a2 / alpha;
}
// Case 3: Color is associated to RGB cube
else {
color.r = loc.x;
color.g = loc.z;
color.b = loc.y;
float a1 = integrated_color.a;
float a2 = val * density_factor * (1 - a1);
alpha = max(a1 + a2, 0.001);
integrated_color *= a1 / alpha;
integrated_color.rgb += color.rgb * a2 / alpha;
}
}
integrated_color.a = alpha;
// stop integrating if the fragment becomes opaque
if( alpha > 0.99 ){
iter = nsteps;
}
""",
after_loop="""
gl_FragColor = integrated_color;
""",
)
TRANSLUCENT2_FRAG_SHADER = FRAG_SHADER.format(**TRANSLUCENT2_SNIPPETS)
ADDITIVE_SNIPPETS = dict(
before_loop="""
vec4 integrated_color = vec4(0., 0., 0., 0.);
""",
in_loop="""
color = $cmap(val);
integrated_color = 1.0 - (1.0 - integrated_color) * (1.0 - color);
""",
after_loop="""
gl_FragColor = integrated_color;
""",
)
ADDITIVE_FRAG_SHADER = FRAG_SHADER.format(**ADDITIVE_SNIPPETS)
ISO_SNIPPETS = dict(
before_loop="""
vec4 color3 = vec4(0.0); // final color
vec3 dstep = 1.5 / u_shape; // step to sample derivative
gl_FragColor = vec4(0.0);
""",
in_loop="""
if (val > u_threshold-0.2) {
// Take the last interval in smaller steps
vec3 iloc = loc - step;
for (int i=0; i<10; i++) {
val = $sample(u_volumetex, iloc).g;
if (val > u_threshold) {
color = $cmap(val);
gl_FragColor = calculateColor(color, iloc, dstep);
iter = nsteps;
break;
}
iloc += step * 0.1;
}
}
""",
after_loop="""
""",
)
ISO_FRAG_SHADER = FRAG_SHADER.format(**ISO_SNIPPETS)
MINIP_SNIPPETS = dict(
before_loop="""
float maxval = -99999.0; // maximum encountered
float minval = 99999.0; // The minimum encountered value
int mini = 0; // Where the minimum value was encountered
""",
in_loop="""
if( val > maxval ) {
maxval = val;
}
if( val < minval ) {
minval = val;
mini = iter;
}
""",
after_loop="""
// Refine search for min value
loc = start_loc + step * (float(mini) - 0.5);
for (int i=0; i<10; i++) {
minval = min(minval, $sample(u_volumetex, loc).g);
loc += step * 0.1;
}
if (minval > u_high_discard_filter_value || minval < u_low_discard_filter_value)
{{
minval = 0.;
}}
// Color is associated to voxel intensity
if (u_color_method == 0) {
gl_FragColor = $cmap(minval);
//gl_FragColor.a = minval;
}
else{
// Color is associated to redshift/velocity
if (u_color_method == 1) {
gl_FragColor = $cmap(loc.y);
//if (minval == 0)
gl_FragColor.a = 1-minval;
}
// Color is associated to RGB cube
else {
if (u_color_method == 2) {
gl_FragColor.r = loc.y;
gl_FragColor.g = loc.z;
gl_FragColor.b = loc.x;
gl_FragColor.a = minval;
}
// Color by sigma values
else if (u_color_method == 3) {
if ( (1-minval < (u_volume_mean + (3.0 * u_volume_std))) )
{
gl_FragColor = vec4(0., 0., 1., 1-minval);
}
// < 3 sigmas
if ( (1-minval >= (u_volume_mean + (3.0 * u_volume_std))) &&
(1-minval < (u_volume_mean + (4.0 * u_volume_std))) )
{
gl_FragColor = vec4(0., 1., 0., 1-minval);
}
if ( (1-minval >= (u_volume_mean + (4.0 * u_volume_std))) &&
(1-minval < (u_volume_mean + (5.0 * u_volume_std))) )
{
gl_FragColor = vec4(1., 0., 0., 1-minval);
}
if ( (1-minval >= (u_volume_mean + (5.0 * u_volume_std))) )
{
gl_FragColor = vec4(1., 1., 1., 1-minval);
}
}
// Case 4: Mom2
// TODO: verify implementation of MIP-mom2.
else {
gl_FragColor = $cmap((minval * ((minval - loc.y) * (minval - loc.y))) / minval);
}
}
}
""",
)
MINIP_FRAG_SHADER = FRAG_SHADER.format(**MINIP_SNIPPETS)
frag_dict = {
'mip': MIP_FRAG_SHADER,
'lmip': LMIP_FRAG_SHADER,
'iso': ISO_FRAG_SHADER,
'avip': TRANSLUCENT_FRAG_SHADER,
'minip': MINIP_FRAG_SHADER,
'translucent2': TRANSLUCENT2_FRAG_SHADER,
'additive': ADDITIVE_FRAG_SHADER,
}
# _interpolation_template = """
# #include "misc/spatial-filters.frag"
# vec4 texture_lookup_filtered(vec2 texcoord) {
# if(texcoord.x < 0.0 || texcoord.x > 1.0 ||
# texcoord.y < 0.0 || texcoord.y > 1.0) {
# discard;
# }
# return %s($texture, $shape, texcoord);
# }"""
#
# _texture_lookup = """
# vec4 texture_lookup(vec2 texcoord) {
# if(texcoord.x < 0.0 || texcoord.x > 1.0 ||
# texcoord.y < 0.0 || texcoord.y > 1.0) {
# discard;
# }
# return texture2D($texture, texcoord);
# }"""
class RenderVolumeVisual(Visual):
""" Displays a 3D Volume
Parameters
----------
vol : ndarray
The volume to display. Must be ndim==3.
clim : tuple of two floats | None
The contrast limits. The values in the volume are mapped to
black and white corresponding to these values. Default maps
between min and max.
method : {'mip', 'avip', 'additive', 'iso'}
The render method to use. See corresponding docs for details.
Default 'mip'.
threshold : float
The threshold to use for the isosurface render method. By default
the mean of the given volume is used.
relative_step_size : float
The relative step size to step through the volume. Default 0.8.
Increase to e.g. 1.5 to increase performance, at the cost of
quality.
cmap : str
Colormap to use.
emulate_texture : bool
Use 2D textures to emulate a 3D texture. OpenGL ES 2.0 compatible,
but has lower performance on desktop platforms.
"""
def __init__(self, vol, clim=None, method='mip', threshold=None,
relative_step_size=0.8, cmap='grays',
emulate_texture=False, color_scale='linear',
filter_type = 0, filter_size = 1,
use_gaussian_filter = False, gaussian_filter_size=9,
density_factor=0.01, color_method='Moment 0', log_scale=0,
interpolation='linear'):
tex_cls = TextureEmulated3D if emulate_texture else Texture3D
# Storage of information of volume
self._vol_shape = ()
self._clim = None
self._need_vertex_update = True
# Set the colormap
self._cmap = get_colormap(cmap)
# Create gloo objects
self._vertices = VertexBuffer()
self._texcoord = VertexBuffer(
np.array([
[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 1, 0],
[0, 0, 1],
[1, 0, 1],
[0, 1, 1],
[1, 1, 1],
], dtype=np.float32))
# # load 'float packed rgba8' interpolation kernel
# # to load float interpolation kernel use
# # `load_spatial_filters(packed=False)`
# kernel, self._interpolation_names = load_spatial_filters()
#
# fun = [Function(_interpolation_template % n)
# for n in self._interpolation_names]
#
# self._interpolation_names = [n.lower()
# for n in self._interpolation_names]
#
# self._interpolation_fun = dict(zip(self._interpolation_names, fun))
# self._interpolation_names.sort()
# self._interpolation_names = tuple(self._interpolation_names)
#
# print self._interpolation_fun
#
# # overwrite "nearest" and "bilinear" spatial-filters
# # with "hardware" interpolation _data_lookup_fn
# self._interpolation_fun['nearest'] = Function(_texture_lookup)
# self._interpolation_fun['bilinear'] = Function(_texture_lookup)
#
# if interpolation not in self._interpolation_names:
# raise ValueError("interpolation must be one of %s" %
# ', '.join(self._interpolation_names))
#
# self._interpolation = interpolation
# check texture interpolation
# if self._interpolation == 'bilinear':
# self._interpolation = 'linear'
# else:
# self._interpolation = 'nearest'
self._tex = tex_cls((10, 10, 10), interpolation=interpolation,
wrapping='clamp_to_edge')
# self._tex = tex_cls((10, 10, 10), interpolation='linear',
# wrapping='clamp_to_edge')
# Create program
Visual.__init__(self, vcode=VERT_SHADER, fcode="")
self.shared_program['u_volumetex'] = self._tex
self.shared_program['a_position'] = self._vertices
self.shared_program['a_texcoord'] = self._texcoord
self._draw_mode = 'triangle_strip'
self._index_buffer = IndexBuffer()
# Only show back faces of cuboid. This is required because if we are
# inside the volume, then the front faces are outside of the clipping
# box and will not be drawn.
self.set_gl_state('translucent', cull_face=False)
# Set data
self.set_data(vol, clim)
# Set params
self.method = method
self.relative_step_size = relative_step_size
#self.color_scale = color_scale
# self.data_min = self._clim[0]
# self.data_max = self._clim[1]
# moving_box_filter (=1 means no filter)
self.filter_type = filter_type
self.filter_size = filter_size
# 3D gaussian filter
self.use_gaussian_filter = use_gaussian_filter
self.gaussian_filter_size = gaussian_filter_size
self.log_scale = log_scale
self.density_factor = density_factor
self.color_method = color_method
self.threshold = threshold if (threshold is not None) else vol.mean()
# print ("threshold", self.threshold)
self.freeze()
def set_data(self, vol, clim=None):
""" Set the volume data.
Parameters
----------
vol : ndarray
The 3D volume.
clim : tuple | None
Colormap limits to use. None will use the min and max values.
"""
# Check volume
if not isinstance(vol, np.ndarray):
raise ValueError('Volume visual needs a numpy array.')
if not ((vol.ndim == 3) or (vol.ndim == 4 and vol.shape[-1] <= 4)):
raise ValueError('Volume visual needs a 3D image.')
# Handle clim
if clim is not None:
clim = np.array(clim, float)
if not (clim.ndim == 1 and clim.size == 2):
raise ValueError('clim must be a 2-element array-like')
self._clim = tuple(clim)
if self._clim is None:
self._clim = np.nanmin(vol), np.nanmax(vol)
# Apply clim
vol = np.flipud(np.array(vol, dtype='float32', copy=False))
if self._clim[1] == self._clim[0]:
if self._clim[0] != 0.:
vol *= 1.0 / self._clim[0]
else:
vol -= self._clim[0]
vol /= self._clim[1] - self._clim[0]
# Deal with nan
if np.isnan(vol).any():
vol = np.nan_to_num(vol)
self.high_discard_filter_value = self._clim[1]
self.low_discard_filter_value = self._clim[0]
self.volume_mean = np.mean(vol)
self.volume_std = np.std(vol)
#self.volume_madfm = self.madfm(vol)
# Apply to texture
print ("min:", np.min(vol), "max:", np.max(vol))
self._tex.set_data(vol) # will be efficient if vol is same shape
self.shared_program['u_shape'] = (vol.shape[2], vol.shape[1], vol.shape[0])
self.shared_program['u_resolution'] = (1/vol.shape[2], 1/vol.shape[1], 1/vol.shape[0])
shape = vol.shape[:3]
if self._vol_shape != shape:
self._vol_shape = shape
self._need_vertex_update = True
self._vol_shape = shape
# Get some stats
self._kb_for_texture = np.prod(self._vol_shape) / 1024
@property
def interpolation(self):
""" Current interpolation function.
"""
return self._tex.interpolation
@interpolation.setter
def interpolation(self, interpolation):
# set interpolation technique
self._tex.interpolation = interpolation
@property
def clim(self):
""" The contrast limits that were applied to the volume data.
Settable via set_data().
"""
return self._clim
@property
def cmap(self):
return self._cmap
@cmap.setter
def cmap(self, cmap):
self._cmap = get_colormap(cmap)
self.shared_program.frag['cmap'] = Function(self._cmap.glsl_map)
self.update()
@property
def method(self):
"""The render method to use
Current options are:
* avip: voxel colors are blended along the view ray until
the result is opaque.
* mip: maximum intensity projection. Cast a ray and display the
maximum value that was encountered.
* additive: voxel colors are added along the view ray until
the result is saturated.
* iso: isosurface. Cast a ray until a certain threshold is
encountered. At that location, lighting calculations are
performed to give the visual appearance of a surface.
"""
return self._method
@method.setter
def method(self, method):
# Check and save
known_methods = list(frag_dict.keys())
if method not in known_methods:
raise ValueError('Volume render method should be in %r, not %r' %
(known_methods, method))
self._method = method
# Get rid of specific variables - they may become invalid
if 'u_threshold' in self.shared_program:
self.shared_program['u_threshold'] = None
self.shared_program.frag = frag_dict[method]
self.shared_program.frag['sampler_type'] = self._tex.glsl_sampler_type
self.shared_program.frag['sample'] = self._tex.glsl_sample
self.shared_program.frag['cmap'] = Function(self._cmap.glsl_map)
self.update()
@property
def color_method(self):
"""The way color is associated with voxel
Current options are:
* regular: Color is associated to voxel intensity (defined by the VR method)
* velocity/redshit: Color is associated to depth coordinate
and alpha to voxel intensity (defined by the VR method)
"""
return self._color_method
@color_method.setter
def color_method(self, color_method):
if color_method == 'Moment 0':
self._color_method = 0
elif color_method == 'Moment 1':
self._color_method = 1
elif color_method == 'rgb_cube':
self._color_method = 2
elif color_method == 'Sigmas':
self._color_method = 3
else:
self._color_method = 4
# print ("color_method", self._color_method)
self.shared_program['u_color_method'] = int(self._color_method)
self.update()
@property
def threshold(self):
""" The threshold value to apply for the isosurface render method.
Also used for the lmip transfer function.
"""
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
if 'u_threshold' in self.shared_program:
self.shared_program['u_threshold'] = self._threshold
self.update()
@property
def color_scale(self):
return self._color_scale
@color_scale.setter
def color_scale(self, color_scale):
if (color_scale == 'linear'):
self._color_scale = 0
else:
self._color_scale = 1
self.shared_program['u_color_scale'] = int(self._color_scale)
self.update()
@property
def log_scale(self):
return self._log_scale
@log_scale.setter
def log_scale(self, log_scale):
self._log_scale = int(log_scale)
#self.shared_program['u_log_scale'] = int(self._log_scale)
self.update()
@property
def data_min(self):
return self._data_min
@data_min.setter
def data_min(self, data_min):
self._data_min = 0.
self.shared_program['u_data_min'] = float(self._data_min)
self.update()
@property
def data_max(self):
return self._data_max
@data_max.setter
def data_max(self, data_max):
self._data_max = 0.
self.shared_program['u_data_max'] = float(self._data_max)
self.update()
@property
def moving_box_filter(self):
return self._moving_box_filter
@moving_box_filter.setter
def moving_box_filter(self, moving_box_filter):
self.shared_program['u_moving_box_filter'] = int(self._moving_box_filter)
self.update()
@property
def volume_mean(self):
return self._volume_mean
@volume_mean.setter
def volume_mean(self, volume_mean):
self._volume_mean = float(volume_mean)
self.shared_program['u_volume_mean'] = self._volume_mean
print ("self._volume_mean", self._volume_mean)
self.update()
@property
def volume_std(self):
return self._volume_std
@volume_std.setter
def volume_std(self, volume_std):
self._volume_std = float(volume_std)
self.shared_program['u_volume_std'] = self._volume_std
print("self._volume_std", self._volume_std)
self.update()
@property
def volume_madfm(self):
return self._volume_madfm
@volume_madfm.setter
def volume_madfm(self, volume_madfm):
self._volume_madfm = float(volume_madfm)
self._volume_madfm -= self._clim[0]
self._volume_madfm /= self._clim[1] - self._clim[0]
self.shared_program['u_volume_madfm'] = self._volume_madfm
self.update()
@property
def filter_size(self):
return self._filter_size
@filter_size.setter
def filter_size(self, filter_size):
self._filter_size = int(filter_size)
self.shared_program['u_filter_size'] = int(self._filter_size)
self.shared_program['u_filter_arm'] = int(np.floor(self._filter_size/2))
self.shared_program['u_filter_coeff'] = float(1/self._filter_size)
self.update()
@property
def filter_type(self):
return self._filter_type
@filter_type.setter
def filter_type(self, filter_type):
if filter_type == 'Rescale':
self._filter_type = 1
else:
self._filter_type = 0
self.shared_program['u_filter_type'] = int(self._filter_type)
self.update()
@property
def use_gaussian_filter(self):
return self._use_gaussian_filter
@use_gaussian_filter.setter
def use_gaussian_filter(self, use_gaussian_filter):
# print ("use_gaussian_filter", use_gaussian_filter)
self._use_gaussian_filter = int(use_gaussian_filter)
self.shared_program['u_use_gaussian_filter'] = int(self._use_gaussian_filter)
self.update()
@property
def gaussian_filter_size(self):
return self._gaussian_filter_size
@gaussian_filter_size.setter
def gaussian_filter_size(self, gaussian_filter_size):
self._gaussian_filter_size = int(gaussian_filter_size)
self.shared_program['u_gaussian_filter_size'] = int(self._gaussian_filter_size)
self.update()
@property
def high_discard_filter_value(self):
return self._high_discard_filter_value
@high_discard_filter_value.setter
def high_discard_filter_value(self, high_discard_filter_value):
self._high_discard_filter_value = float(high_discard_filter_value)
self._high_discard_filter_value -= self._clim[0]
self._high_discard_filter_value /= self._clim[1] - self._clim[0]
self.shared_program['u_high_discard_filter_value'] = self._high_discard_filter_value
self.update()
@property
def low_discard_filter_value(self):
return self._low_discard_filter_value
@low_discard_filter_value.setter
def low_discard_filter_value(self, low_discard_filter_value):
self._low_discard_filter_value = float(low_discard_filter_value)
self._low_discard_filter_value -= self._clim[0]
self._low_discard_filter_value /= self._clim[1] - self._clim[0]
self.shared_program['u_low_discard_filter_value'] = self._low_discard_filter_value
self.update()
@property
def density_factor(self):
return self._density_factor
@density_factor.setter
def density_factor(self, density_factor):
self._density_factor = float(density_factor)
self.shared_program['u_density_factor'] = self._density_factor
self.update()
@property
def relative_step_size(self):
""" The relative step size used during raycasting.
Larger values yield higher performance at reduced quality. If
set > 2.0 the ray skips entire voxels. Recommended values are
between 0.5 and 1.5. The amount of quality degradation depends
on the render method.
"""
return self._relative_step_size
@relative_step_size.setter
def relative_step_size(self, value):
value = float(value)
if value < 0.1:
raise ValueError('relative_step_size cannot be smaller than 0.1')
self._relative_step_size = value
self.shared_program['u_relative_step_size'] = value
def _create_vertex_data(self):
""" Create and set positions and texture coords from the given shape
We have six faces with 1 quad (2 triangles) each, resulting in
6*2*3 = 36 vertices in total.
"""
shape = self._vol_shape
# Get corner coordinates. The -0.5 offset is to center
# pixels/voxels. This works correctly for anisotropic data.
x0, x1 = -0.5, shape[2] - 0.5
y0, y1 = -0.5, shape[1] - 0.5
z0, z1 = -0.5, shape[0] - 0.5
pos = np.array([
[x0, y0, z0],
[x1, y0, z0],
[x0, y1, z0],
[x1, y1, z0],
[x0, y0, z1],
[x1, y0, z1],
[x0, y1, z1],
[x1, y1, z1],
], dtype=np.float32)
"""
6-------7
/| /|
4-------5 |
| | | |
| 2-----|-3
|/ |/
0-------1
"""
# Order is chosen such that normals face outward; front faces will be
# culled.
indices = np.array([2, 6, 0, 4, 5, 6, 7, 2, 3, 0, 1, 5, 3, 7],
dtype=np.uint32)
# Apply
self._vertices.set_data(pos)
self._index_buffer.set_data(indices)
def _compute_bounds(self, axis, view):
return 0, self._vol_shape[axis]
def _prepare_transforms(self, view):
trs = view.transforms
view.view_program.vert['transform'] = trs.get_transform()
view_tr_f = trs.get_transform('visual', 'document')
view_tr_i = view_tr_f.inverse
view.view_program.vert['viewtransformf'] = view_tr_f
view.view_program.vert['viewtransformi'] = view_tr_i
def _prepare_draw(self, view):
if self._need_vertex_update:
self._create_vertex_data()
def madfm(self, volume):
# Median absolute deviation from the median (MADFM), scaled by ~1.4826 to an
# equivalent Gaussian sigma. As defined in Whiting, M. T., "DUCHAMP: a 3D source
# finder for spectral-line data", MNRAS, 2012. Note the np.abs(): without it the
# median of the signed deviations would be ~0 rather than the MADFM.
return np.median(np.abs(volume - np.median(volume))) * 1.4826042
RenderVolume = create_visual_node(RenderVolumeVisual)
def get_interpolation_fun():
# NOTE: as written this function calls itself and would recurse without bound;
# it appears to be a placeholder or a truncated definition.
return get_interpolation_fun()
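# --- Hypothetical usage sketch (added for illustration, not part of the original
# module). It shows how the RenderVolume scene node defined above could be placed
# in a vispy scene; the constructor arguments are an assumption based on
# set_data(vol, clim), so adjust them to the actual signature.
#
# import numpy as np
# from vispy import app, scene
#
# canvas = scene.SceneCanvas(keys='interactive', show=True)
# view = canvas.central_widget.add_view()
# view.camera = scene.cameras.TurntableCamera(fov=60)
# data = np.random.rand(64, 64, 64).astype('float32')  # toy volume
# volume_node = RenderVolume(data, parent=view.scene)   # assumed signature
# app.run()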
| 33.252964
| 110
| 0.567297
| 6,073
| 50,478
| 4.508315
| 0.130084
| 0.006501
| 0.004821
| 0.004237
| 0.431024
| 0.346762
| 0.302604
| 0.27459
| 0.24979
| 0.226816
| 0
| 0.039963
| 0.317386
| 50,478
| 1,517
| 111
| 33.274885
| 0.754622
| 0.150046
| 0
| 0.326025
| 0
| 0.025739
| 0.654076
| 0.044814
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048618
| false
| 0.000953
| 0.007626
| 0.018112
| 0.089609
| 0.00286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef724512834ae77b7fe4b2559fc21eb34f4025f5
| 5,476
|
py
|
Python
|
integration/experiment/common_args.py
|
avilcheslopez/geopm
|
35ad0af3f17f42baa009c97ed45eca24333daf33
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
integration/experiment/common_args.py
|
avilcheslopez/geopm
|
35ad0af3f17f42baa009c97ed45eca24333daf33
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
integration/experiment/common_args.py
|
avilcheslopez/geopm
|
35ad0af3f17f42baa009c97ed45eca24333daf33
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
#
# Copyright (c) 2015 - 2022, Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
#
'''
Common command line arguments for experiments.
'''
def setup_run_args(parser):
"""Add common arguments for all run scripts:
--output-dir --node-count --trial-count --cool-off-time
"""
add_output_dir(parser)
add_node_count(parser)
add_trial_count(parser)
add_cool_off_time(parser)
add_enable_traces(parser)
add_enable_profile_traces(parser)
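# Hypothetical usage sketch (added for illustration, not part of the original
# module): wiring the helpers above into an argparse parser for a run script.
# The sample command line is made up for demonstration.
#
# import argparse
# parser = argparse.ArgumentParser(description='example run script')
# setup_run_args(parser)
# args = parser.parse_args(['--node-count', '4', '--trial-count', '3'])
# print(args.output_dir, args.node_count, args.trial_count, args.enable_traces)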
def add_output_dir(parser):
parser.add_argument('--output-dir', dest='output_dir',
action='store', default='.',
help='location for reports and other output files')
def add_trial_count(parser):
parser.add_argument('--trial-count', dest='trial_count',
action='store', type=int, default=2,
help='number of experiment trials to launch')
def add_node_count(parser):
parser.add_argument('--node-count', dest='node_count',
default=1, type=int,
help='number of nodes to use for launch')
def add_show_details(parser):
parser.add_argument('--show-details', dest='show_details',
action='store_true', default=False,
help='print additional data analysis details')
def add_min_power(parser):
parser.add_argument('--min-power', dest='min_power',
action='store', type=float, default=None,
help='bottom power limit for the sweep')
def add_max_power(parser):
parser.add_argument('--max-power', dest='max_power',
action='store', type=float, default=None,
help='top power limit for the sweep')
def add_step_power(parser):
parser.add_argument('--step-power', dest='step_power',
action='store', type=float, default=10,
help='increment between power steps for sweep')
def add_label(parser):
parser.add_argument('--label', action='store', default="APP",
help='name of the application to use for plot titles')
def add_min_frequency(parser):
parser.add_argument('--min-frequency', dest='min_frequency',
action='store', type=float, default=None,
help='bottom core frequency limit for the sweep')
def add_max_frequency(parser):
parser.add_argument('--max-frequency', dest='max_frequency',
action='store', type=float, default=None,
help='top core frequency limit for the sweep')
def add_step_frequency(parser):
parser.add_argument('--step-frequency', dest='step_frequency',
action='store', type=float, default=None,
help='increment between core frequency steps for sweep')
def add_run_max_turbo(parser):
parser.add_argument("--run-max-turbo", dest="run_max_turbo",
action='store_true', default=False,
help='add extra run to the experiment at maximum turbo frequency')
def add_use_stdev(parser):
parser.add_argument('--use-stdev', dest='use_stdev',
action='store_true', default=False,
help='use standard deviation instead of min-max spread for error bars')
def add_cool_off_time(parser):
parser.add_argument('--cool-off-time', dest='cool_off_time',
action='store', type=float, default=60,
help='wait time between workload execution for cool down')
def add_agent_list(parser):
parser.add_argument('--agent-list', dest='agent_list',
action='store', type=str, default=None,
help='comma separated list of agents to be compared')
def add_enable_traces(parser):
parser.add_argument('--enable-traces', dest='enable_traces',
action='store_const', const=True,
default=False, help='Enable trace generation')
parser.add_argument('--disable-traces', dest='enable_traces',
action='store_const', const=False,
help='Disable trace generation')
def add_disable_traces(parser):
add_enable_traces(parser)
parser.set_defaults(enable_traces=True)
def add_enable_profile_traces(parser):
parser.add_argument('--enable-profile-traces', dest='enable_profile_traces',
action='store_const', const=True,
default=False, help='Enable profile trace generation')
parser.add_argument('--disable-profile-traces', dest='enable_profile_traces',
action='store_const', const=False,
help='Disable profile trace generation')
def add_disable_profile_traces(parser):
add_enable_profile_traces(parser)
parser.set_defaults(enable_profile_traces=True)
def add_performance_metric(parser):
parser.add_argument('--performance-metric', dest='performance_metric',
action='store', type=str, default='FOM',
help='metric to use for performance (default: figure of merit)')
def add_analysis_dir(parser):
parser.add_argument('--analysis-dir', dest='analysis_dir',
action='store', default='analysis',
help='directory for output analysis files')
| 36.506667
| 95
| 0.611578
| 640
| 5,476
| 5.040625
| 0.198438
| 0.080905
| 0.110663
| 0.135462
| 0.50558
| 0.33447
| 0.226906
| 0.181029
| 0.059826
| 0.059826
| 0
| 0.003774
| 0.274105
| 5,476
| 149
| 96
| 36.751678
| 0.807799
| 0.042001
| 0
| 0.170213
| 0
| 0
| 0.297432
| 0.017056
| 0
| 0
| 0
| 0
| 0
| 1
| 0.234043
| false
| 0
| 0
| 0
| 0.234043
| 0.010638
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef7445ff5f0dbb5c8605cf8ad95f6ecbcc7f04a5
| 10,466
|
py
|
Python
|
Source Code.py
|
S-AlMazrouai/Chess-king-last-position-finder
|
609346ee660655bd7aa2afe486c4ad074e3d33fc
|
[
"MIT"
] | 1
|
2022-02-04T11:14:13.000Z
|
2022-02-04T11:14:13.000Z
|
Source Code.py
|
S-AlMazrouai/Chess-king-last-position-finder
|
609346ee660655bd7aa2afe486c4ad074e3d33fc
|
[
"MIT"
] | null | null | null |
Source Code.py
|
S-AlMazrouai/Chess-king-last-position-finder
|
609346ee660655bd7aa2afe486c4ad074e3d33fc
|
[
"MIT"
] | null | null | null |
import requests
import json
import chess
import chess.pgn
import io
from collections import Counter
from openpyxl import load_workbook
import numpy
#API link: https://api.chess.com/pub/player/{user}/games/{year}/{month}/pgn
baseUrl='https://api.chess.com/pub/player/'
users=['Mazrouai'] # You can add one or more chess.com profiles; make sure to type the profile name(s) exactly as written on chess.com.
for user in users:
years = range(2000,2022) # Add the range of the years you want this code to analyze (from, to); the upper bound is exclusive.
months = ['01','02','03','04','05','06','07','08','09','10','11','12'] # Keep this as it is.
count=0
winBlackKingPos=[] # Array to collect King position in the games won as black.
lossBlackKingPos=[] # Array to collect King position in the games lost as black.
winWhiteKingPos=[] # Array to collect King position in the games won as white.
lossWhiteKingPos=[] # Array to collect King position in the games lost as white.
for i in years: # For loop to iterate through the specified range of years.
for j in months: # For loop to iterate through the months of the specified years.
extension=str(str(user)+'/games/'+str(i)+'/'+str(j)+'/pgn') # Creates the extension for the baseUrl.
url=baseUrl+extension # Merges baseUrl with the extension.
response = requests.get(url)
pgns = io.StringIO(response.text)
if response.text == '': # Checks if pgn file is empty and if it is, it jumps to the next PGN file.
continue
while True:
games=chess.pgn.read_game(pgns) # Reads PGN file.
if games == None: # Checks if there is a game available to read inside the pgn file, if not it exits this loop to the next PGN file.
break
if games.headers['Black'] == '?': # Checks if game data is missing, if true it jumps to the next game.
continue
if games.headers['White'] == '?': # Checks if game data is missing, if true it jumps to the next game.
continue
board=games.board()
for move in games.mainline_moves(): # Moves to the last position in the game.
board.push(move)
map=board.piece_map() # Collects the positions of the pieces after their last move.
if games.headers['Black']== str(user): # Checks if the specified user is playing as black.
for x,y in map.items():
if str(y) == 'k':
kingPos=chess.square_name(x) # Gets the black king position.
if games.headers['Result'] == '0-1': # Collects the king position in the games won as black.
winBlackKingPos.append(kingPos)
if games.headers['Result'] == '1-0': # Collects the king position in the games lost as black.
lossBlackKingPos.append(kingPos)
else: # If the condition above is not satisfied, the specified user is playing as white.
for x,y in map.items():
if str(y) == 'K':
kingPos=chess.square_name(x) # Gets the white king position.
if games.headers['Result'] == '0-1': # Collects the king position in the games lost as white.
lossWhiteKingPos.append(kingPos)
if games.headers['Result'] == '1-0': # Collects the king position in the games won as white.
winWhiteKingPos.append(kingPos)
gamesWon=len(winBlackKingPos)+len(winWhiteKingPos) # Counts # of won games.
gamesLost=len(lossBlackKingPos)+len(lossWhiteKingPos) # Counts # of lost games.
gamesPlayed=gamesWon+gamesLost # Counts # of analyzed games.
print("Player: ",user) # Prints the name of the player.
print("games played: ",gamesPlayed) # Prints # of analyzed games.
print("games won: ",gamesWon) # Prints # of won games.
print("games lost: ",gamesLost) # Prints # of lost games.
print("\n")
winWhiteKingPosCount= Counter(winWhiteKingPos) # Counts, per position, the number of times the winning white king ended there.
lossWhiteKingPosCount= Counter(lossWhiteKingPos) # Counts, per position, the number of times the losing white king ended there.
winBlackKingPosCount= Counter(winBlackKingPos) # Counts, per position, the number of times the winning black king ended there.
lossBlackKingPosCount= Counter(lossBlackKingPos) # Counts, per position, the number of times the losing black king ended there.
posCounts=[winWhiteKingPosCount,lossWhiteKingPosCount,winBlackKingPosCount,lossBlackKingPosCount] # Merges the lists into an array.
Data = load_workbook(filename='Data_Template.xlsx') # Opens the template Excel file.
sheets=Data.sheetnames # Registers the sheet names.
cellLetters=[] # Array for the cell letters in the Excel file.
cellNum=[] # Array for the cell numbers in the Excel file.
for j in range(8): # Generates the cell letters for the cells this code will work on.
for i in range(66, 74):
cellLetters.append(chr(i))
for i in [10,9,8,7,6,5,4,3]: # Generates the cell numbers for the cells this code will work on.
for j in range(8):
cellNum.append(i)
c = 0 # This variable will be used as an index to go through the lists that have been merged into an array.
for sheet in sheets: # For loop to iterate through the Excel sheets.
workSheet=Data[sheet]
posCount=posCounts[c] # Gets the position counter.
c=c+1
for i in range(64): # For loop to go through the sheet cells and assign them the king recurrence value.
cell=str(cellLetters[i])+str(cellNum[i]) # Constructs the Excel cell name (e.g. B10).
count=posCount[chess.square_name(i)] # Gets the king position count that corresponds to the cell name.
if count== 0: # If the king recurrence equals 0, set the cell to None.
count= None
workSheet[cell] = count # Sets the cell value to the king recurrence in that position.
Data.save(filename='Data_'+str(user)+'.xlsx') # Saves the data into a new xlsx file
| 87.94958
| 228
| 0.391458
| 898
| 10,466
| 4.551225
| 0.265033
| 0.013457
| 0.028627
| 0.033276
| 0.31539
| 0.30071
| 0.24321
| 0.24321
| 0.233423
| 0.212381
| 0
| 0.01374
| 0.55494
| 10,466
| 118
| 229
| 88.694915
| 0.863675
| 0.280623
| 0
| 0.146067
| 0
| 0
| 0.027715
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.089888
| 0
| 0.089888
| 0.05618
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef7a8845996df8b5695e947565280cd90979fd06
| 1,840
|
py
|
Python
|
load.py
|
ontocord/create_pii_dataset
|
bfd246a8f8b443e238f260f307bd41d86adc3136
|
[
"Apache-2.0"
] | null | null | null |
load.py
|
ontocord/create_pii_dataset
|
bfd246a8f8b443e238f260f307bd41d86adc3136
|
[
"Apache-2.0"
] | null | null | null |
load.py
|
ontocord/create_pii_dataset
|
bfd246a8f8b443e238f260f307bd41d86adc3136
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright, 2021 Ontocord, LLC, All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datasets import load_dataset
import os
import re
import itertools
from re import finditer
import glob
import random
import fsspec
import json
from random import randint, choice
from collections import Counter
import spacy, itertools
import langid
from nltk.corpus import stopwords
import fsspec, os, gzip
from faker import Faker
from faker.providers import person, company, geo, address
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer, MarianMTModel, AutoTokenizer, pipeline
import torch
import sys
from tqdm import tqdm
model_name = 'Helsinki-NLP/opus-mt-en-hi'
model = MarianMTModel.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
model_name = 'Helsinki-NLP/opus-mt-en-ar'
model = MarianMTModel.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
model_name = 'Helsinki-NLP/opus-mt-en-zh'
model = MarianMTModel.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = M2M100ForConditionalGeneration.from_pretrained("facebook/m2m100_418M")
tokenizer = M2M100Tokenizer.from_pretrained("facebook/m2m100_418M")
nlp = spacy.load('en_core_web_lg')
stopwords_en = set(stopwords.words('english'))
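# Hypothetical usage sketch (added for illustration, not part of the original
# script). Note that the assignments above rebind `model`/`tokenizer` each time,
# so at this point they hold the M2M100 pair; the Marian models loaded earlier
# are no longer reachable under those names.
#
# tokenizer.src_lang = "en"
# encoded = tokenizer("The quick brown fox jumps over the lazy dog.", return_tensors="pt")
# generated = model.generate(**encoded, forced_bos_token_id=tokenizer.get_lang_id("hi"))
# print(tokenizer.batch_decode(generated, skip_special_tokens=True))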
| 33.454545
| 112
| 0.807609
| 257
| 1,840
| 5.688716
| 0.498054
| 0.055404
| 0.077975
| 0.094391
| 0.281122
| 0.237346
| 0.237346
| 0.218194
| 0.218194
| 0.218194
| 0
| 0.024089
| 0.120109
| 1,840
| 54
| 113
| 34.074074
| 0.878938
| 0.318478
| 0
| 0.176471
| 0
| 0
| 0.112187
| 0.062954
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.617647
| 0
| 0.617647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
ef90aefef6921157afac229b23fbddf7cab99743
| 854
|
py
|
Python
|
help_desk/help_desk/doctype/department_name/department_name.py
|
shrikant9867/mycfohelpdesk
|
b285b156aec53ecff5873f4630638687ff5a0e92
|
[
"MIT"
] | null | null | null |
help_desk/help_desk/doctype/department_name/department_name.py
|
shrikant9867/mycfohelpdesk
|
b285b156aec53ecff5873f4630638687ff5a0e92
|
[
"MIT"
] | null | null | null |
help_desk/help_desk/doctype/department_name/department_name.py
|
shrikant9867/mycfohelpdesk
|
b285b156aec53ecff5873f4630638687ff5a0e92
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Indictrans and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import json
import string
from frappe.model.document import Document
from frappe.utils import cstr, flt, getdate, comma_and, cint
from frappe import _
from erpnext.controllers.item_variant import get_variant, copy_attributes_to_variant, ItemVariantExistsError
class DepartmentName(Document):
def autoname(self):
self.name = self.department_abbriviation.upper()
def validate(self):
self.validate_name_of_department()
def validate_name_of_department(self):
if(self.name_of_department):
self.name_of_department = self.name_of_department.title()
if(self.department_abbriviation):
self.department_abbriviation = self.department_abbriviation.upper()
| 32.846154
| 108
| 0.799766
| 110
| 854
| 5.954545
| 0.481818
| 0.045802
| 0.122137
| 0.091603
| 0.210687
| 0.210687
| 0.091603
| 0.091603
| 0
| 0
| 0
| 0.006658
| 0.120609
| 854
| 26
| 109
| 32.846154
| 0.865513
| 0.137002
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.444444
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
ef9feaf45807510f4cf448436f428cc436b0de04
| 744
|
py
|
Python
|
main.py
|
bhaskar-nair2/Coded-Passwords
|
306d01e54bf43c46267ed12c907a49932326b931
|
[
"MIT"
] | null | null | null |
main.py
|
bhaskar-nair2/Coded-Passwords
|
306d01e54bf43c46267ed12c907a49932326b931
|
[
"MIT"
] | null | null | null |
main.py
|
bhaskar-nair2/Coded-Passwords
|
306d01e54bf43c46267ed12c907a49932326b931
|
[
"MIT"
] | null | null | null |
import hashlib
class data:
def __init__(self,username,password):
self.username=username
self.hash=self.get_hash(password)
def get_hash(self,password):
for _ in range(0,9999999):
head=(str(_)+self.username+password).encode()
i=hashlib.sha3_256(head).hexdigest()
if(i[:4]=='0000'):
self.num=_
return i
@staticmethod
def retrive(username,password,hash,num):
head = (str(num) + username + password).encode()
i = hashlib.sha3_256(head).hexdigest()
if(i==hash):
return True
else:
return False
def maker(self):
arr = {"hash": self.hash, "num": self.num}
return arr
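# Hypothetical usage sketch (added for illustration, not part of the original
# file): mine a record for a username/password pair, then verify it again from
# the stored hash and nonce.
if __name__ == "__main__":
    demo = data("alice", "correct horse battery staple")
    record = demo.maker()
    # retrive() recomputes the salted sha3-256 digest from the nonce and compares it.
    print(data.retrive("alice", "correct horse battery staple",
                       record["hash"], record["num"]))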
| 28.615385
| 57
| 0.555108
| 87
| 744
| 4.62069
| 0.402299
| 0.159204
| 0.099502
| 0.114428
| 0.263682
| 0.263682
| 0.263682
| 0.263682
| 0.263682
| 0.263682
| 0
| 0.041257
| 0.31586
| 744
| 26
| 58
| 28.615385
| 0.748527
| 0
| 0
| 0.086957
| 0
| 0
| 0.014765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173913
| false
| 0.26087
| 0.043478
| 0
| 0.434783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
efbe9e033668c8068ec57cb141083c350416dc90
| 1,668
|
py
|
Python
|
src/controllers/userController.py
|
gioliveirass/fatec-BDNR-MercadoLivre
|
dd2c407f6728e4f11e8292463cc2ba3ad562de1e
|
[
"MIT"
] | null | null | null |
src/controllers/userController.py
|
gioliveirass/fatec-BDNR-MercadoLivre
|
dd2c407f6728e4f11e8292463cc2ba3ad562de1e
|
[
"MIT"
] | null | null | null |
src/controllers/userController.py
|
gioliveirass/fatec-BDNR-MercadoLivre
|
dd2c407f6728e4f11e8292463cc2ba3ad562de1e
|
[
"MIT"
] | null | null | null |
import connectBD as connectDB
from pprint import pprint
def findSort():
mydb = connectDB.connect()
mycol = mydb.usuario
print("\n===========================")
print("==== TODOS OS USUARIOS ====")
print("===========================\n")
mydoc = mycol.find().sort("nome")
for x in mydoc:
pprint(x)
def insert(name, cpf):
mydb = connectDB.connect()
mycol = mydb.usuario
print("\n=========================")
print("=== USUARIO INSERIDO ===")
print("=========================\n")
mydict = { "nome": name, "cpf": cpf }
x = mycol.insert_one(mydict)
pprint(x.inserted_id)
print("Usuario cadastrado com sucesso.")
def findQuery(name):
mydb = connectDB.connect()
mycol = mydb.usuario
print("\n=========================")
print("==== USUARIO BUSCADO ====")
print("=========================\n")
myquery = { "nome": name }
mydoc = mycol.find(myquery)
for x in mydoc:
pprint(x)
def update(name, newName):
mydb = connectDB.connect()
mycol = mydb.usuario
print("\n============================")
print("==== USUARIO ATUALIZADO ====")
print("============================\n")
myquery = { "nome": name }
newvalues = { "$set": { "nome": newName } }
mycol.update_one(myquery, newvalues)
pprint("Usuario atualizado com sucesso.")
def delete(name):
mydb = connectDB.connect()
mycol = mydb.usuario
print("\n==========================")
print("==== USUARIO DELETADO ====")
print("==========================\n")
myquery = { "nome": name }
mycol.delete_one(myquery)
pprint("Usuario deletado com sucesso.")
| 30.327273
| 49
| 0.492206
| 162
| 1,668
| 5.04321
| 0.277778
| 0.073439
| 0.122399
| 0.152999
| 0.46022
| 0.383109
| 0.383109
| 0.331701
| 0.331701
| 0.274174
| 0
| 0
| 0.205036
| 1,668
| 55
| 50
| 30.327273
| 0.616139
| 0
| 0
| 0.42
| 0
| 0
| 0.319952
| 0.168963
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.04
| 0
| 0.14
| 0.44
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
efe02360bc1283274b4bc2434f2af992e192e9a4
| 7,403
|
py
|
Python
|
package/tests/test_cp/test_azure/test_domain/test_services/test_vm_credentials_service.py
|
tim-spiglanin/Azure-Shell
|
58c52994f0d6cfd798c5dca33737419ec18363d4
|
[
"Apache-2.0"
] | 5
|
2016-09-08T08:33:47.000Z
|
2020-02-10T12:31:15.000Z
|
package/tests/test_cp/test_azure/test_domain/test_services/test_vm_credentials_service.py
|
tim-spiglanin/Azure-Shell
|
58c52994f0d6cfd798c5dca33737419ec18363d4
|
[
"Apache-2.0"
] | 505
|
2016-08-09T07:41:03.000Z
|
2021-02-08T20:26:46.000Z
|
package/tests/test_cp/test_azure/test_domain/test_services/test_vm_credentials_service.py
|
tim-spiglanin/Azure-Shell
|
58c52994f0d6cfd798c5dca33737419ec18363d4
|
[
"Apache-2.0"
] | 5
|
2016-12-21T12:52:55.000Z
|
2021-07-08T09:50:42.000Z
|
from unittest import TestCase
import mock
from cloudshell.cp.azure.domain.services.vm_credentials_service import VMCredentialsService
from cloudshell.cp.azure.models.vm_credentials import VMCredentials
class TestVMCredentialsService(TestCase):
def setUp(self):
self.test_username = "test_username"
self.test_password = "testPassword123"
self.test_group_name = "test_username"
self.test_storage_name = "test_storage_name"
self.test_storage_service = mock.MagicMock()
self.test_key_pair_service = mock.MagicMock()
self.test_storage_client = mock.MagicMock()
self.vm_credentials = VMCredentialsService()
def test_generate_password(self):
"""Check that method will generate password with given length and with digit and uppercase letter"""
# Act
password = self.vm_credentials._generate_password(19)
# Verify
self.assertEqual(len(password), 19)
self.assertTrue(any(char.isdigit() for char in password),
msg="Generated password must contain at least one digit character")
self.assertTrue(any(char.isupper() for char in password),
msg="Generated password must contain at least one uppercase character")
@mock.patch("cloudshell.cp.azure.domain.services.vm_credentials_service.AuthorizedKey")
def test_get_ssh_key(self, authorized_key_class):
"""Check that method will return cloudshell.cp.azure.models.authorized_key.AuthorizedKey instance"""
authorized_key_class.return_value = authorized_key = mock.MagicMock()
ssh_key = self.vm_credentials._get_ssh_key(
username=self.test_username,
storage_service=self.test_storage_service,
key_pair_service=self.test_key_pair_service,
storage_client=self.test_storage_client,
group_name=self.test_group_name,
storage_name=self.test_storage_name)
self.assertIs(ssh_key, authorized_key)
@mock.patch("cloudshell.cp.azure.domain.services.vm_credentials_service.OperatingSystemTypes")
def test_prepare_credentials_with_windows_os_type(self, os_types):
"""Check that method will call _prepare_windows_credentials and return VMCredentials model instance"""
self.vm_credentials._prepare_windows_credentials = mock.MagicMock(return_value=(self.test_username,
self.test_password))
vm_creds = self.vm_credentials.prepare_credentials(
os_type=os_types.windows,
username=self.test_username,
password=self.test_password,
storage_service=self.test_storage_service,
key_pair_service=self.test_key_pair_service,
storage_client=self.test_storage_client,
group_name=self.test_group_name,
storage_name=self.test_storage_name)
self.vm_credentials._prepare_windows_credentials.assert_called_once_with(self.test_username, self.test_password)
self.assertIsInstance(vm_creds, VMCredentials)
@mock.patch("cloudshell.cp.azure.domain.services.vm_credentials_service.OperatingSystemTypes")
def test_prepare_credentials_with_linux_os_type(self, os_types):
"""Check that method will call _prepare_linux_credentials and return VMCredentials model instance"""
# from azure.mgmt.compute.models import OperatingSystemTypes
self.vm_credentials._prepare_linux_credentials = mock.MagicMock(return_value=(self.test_username,
self.test_password,
mock.MagicMock()))
vm_creds = self.vm_credentials.prepare_credentials(
os_type=os_types.linux,
username=self.test_username,
password=self.test_password,
storage_service=self.test_storage_service,
key_pair_service=self.test_key_pair_service,
storage_client=self.test_storage_client,
group_name=self.test_group_name,
storage_name=self.test_storage_name)
self.vm_credentials._prepare_linux_credentials.assert_called_once_with(
username=self.test_username,
password=self.test_password,
storage_service=self.test_storage_service,
key_pair_service=self.test_key_pair_service,
storage_client=self.test_storage_client,
group_name=self.test_group_name,
storage_name=self.test_storage_name)
self.assertIsInstance(vm_creds, VMCredentials)
def test_prepare_windows_credentials(self):
"""Check that method will return same credentials if username and password were provided"""
username, password = self.vm_credentials._prepare_windows_credentials(self.test_username, self.test_password)
self.assertEqual(username, self.test_username)
self.assertEqual(password, self.test_password)
def test_prepare_windows_credentials_without_user_and_password(self):
"""Check that method will return default username and generate password if credentials weren't provided"""
generated_pass = mock.MagicMock()
self.vm_credentials._generate_password = mock.MagicMock(return_value=generated_pass)
username, password = self.vm_credentials._prepare_windows_credentials("", "")
self.assertEqual(username, self.vm_credentials.DEFAULT_WINDOWS_USERNAME)
self.assertEqual(password, generated_pass)
def test_prepare_linux_credentials(self):
"""Check that method will return same credentials if username and password were provided"""
username, password, ssh_key = self.vm_credentials._prepare_linux_credentials(
username=self.test_username,
password=self.test_password,
storage_service=self.test_storage_service,
key_pair_service=self.test_key_pair_service,
storage_client=self.test_storage_client,
group_name=self.test_group_name,
storage_name=self.test_storage_name)
self.assertEqual(username, self.test_username)
self.assertEqual(password, self.test_password)
self.assertIsNone(ssh_key)
def test_prepare_linux_credentials_without_user_and_password(self):
"""Check that method will return default username and ssh_key if credentials weren't provided"""
returned_ssh_key = mock.MagicMock()
self.vm_credentials._get_ssh_key = mock.MagicMock(return_value=returned_ssh_key)
username, password, ssh_key = self.vm_credentials._prepare_linux_credentials(
username="",
password="",
storage_service=self.test_storage_service,
key_pair_service=self.test_key_pair_service,
storage_client=self.test_storage_client,
group_name=self.test_group_name,
storage_name=self.test_storage_name)
self.assertEqual(username, self.vm_credentials.DEFAULT_LINUX_USERNAME)
self.assertEqual(password, "")
self.assertEqual(ssh_key, returned_ssh_key)
| 51.409722
| 120
| 0.685533
| 830
| 7,403
| 5.761446
| 0.121687
| 0.097031
| 0.065872
| 0.050188
| 0.780636
| 0.665412
| 0.609159
| 0.582601
| 0.571936
| 0.549352
| 0
| 0.001253
| 0.245171
| 7,403
| 143
| 121
| 51.769231
| 0.85451
| 0.110226
| 0
| 0.481132
| 0
| 0
| 0.062958
| 0.035147
| 0
| 0
| 0
| 0
| 0.169811
| 1
| 0.084906
| false
| 0.264151
| 0.037736
| 0
| 0.132075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
efe7d81ac7833b8ba25967361da1b664addd861c
| 498
|
py
|
Python
|
setup.py
|
nicosandller/python-ethereumrpc
|
e826f99bbb34dc3d8009ac9392677e9ae2c9fa36
|
[
"MIT"
] | 1
|
2019-03-28T19:16:21.000Z
|
2019-03-28T19:16:21.000Z
|
setup.py
|
nicosandller/python-ethereumrpc
|
e826f99bbb34dc3d8009ac9392677e9ae2c9fa36
|
[
"MIT"
] | null | null | null |
setup.py
|
nicosandller/python-ethereumrpc
|
e826f99bbb34dc3d8009ac9392677e9ae2c9fa36
|
[
"MIT"
] | null | null | null |
from distutils.core import setup
setup(
name = 'python-ethereumrpc',
packages = ['python-ethereumrpc'],
version = '0.1',
description = 'A python interface for ethereum JSON-RPC service.',
author = 'Nicolas Sandller',
author_email = '[email protected]',
url = 'https://github.com/nicosandller/python-ethereumrpc',
download_url = 'https://github.com/nicosandller/python-ethereumrpc/tarball/0.1',
keywords = ['ethereum', 'rpc', 'api', 'JSON', 'JSON-RPC'],
classifiers = [],
)
| 35.571429
| 82
| 0.696787
| 58
| 498
| 5.948276
| 0.603448
| 0.197101
| 0.081159
| 0.098551
| 0.266667
| 0.266667
| 0.266667
| 0
| 0
| 0
| 0
| 0.009281
| 0.134538
| 498
| 13
| 83
| 38.307692
| 0.791183
| 0
| 0
| 0
| 0
| 0
| 0.53012
| 0.044177
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4bca99ba9eda853683218d8ee0882faa531e6181
| 3,531
|
py
|
Python
|
companion_app/live_log_retrieval.py
|
MorganJamesSmith/uni-project-ideas
|
6c48d0edb526908ed95192e97ab47df1257b6036
|
[
"BSD-3-Clause"
] | 1
|
2020-09-15T15:33:33.000Z
|
2020-09-15T15:33:33.000Z
|
companion_app/live_log_retrieval.py
|
MorganJamesSmith/uni-project
|
6c48d0edb526908ed95192e97ab47df1257b6036
|
[
"BSD-3-Clause"
] | null | null | null |
companion_app/live_log_retrieval.py
|
MorganJamesSmith/uni-project
|
6c48d0edb526908ed95192e97ab47df1257b6036
|
[
"BSD-3-Clause"
] | null | null | null |
"""
implements a wrapper for loading live data from the serial connection and passing it to plotting
"""
import serial
import time
import struct
import plotly.express as px
try:
from . import log_parser
except ImportError:
import log_parser
# TODO: clean up CLI code
class LiveLogFile():
def __init__(self, serial_device_name: str="/dev/ttyACM0", initial_file_offset = -1,
callback_to_call_right_before_grabbing_new_data=lambda:None,
callback_to_call_when_caught_up_with_data=lambda:time.sleep(1)):
self.serial_device_name = serial_device_name
self.internal_buffer: bytes = b""
self.log_file_offset: int = initial_file_offset
self.sleep_callback = callback_to_call_when_caught_up_with_data
self.before_serial_hook = callback_to_call_right_before_grabbing_new_data
def read(self, nbytes=1):
if len(self.internal_buffer) < nbytes:
new_data = self.read_from_device()
self.internal_buffer = self.internal_buffer + new_data
if len(self.internal_buffer) < nbytes:
import warnings
warnings.warn("reading data from device didn't produce enough content to keep going.")
togive = self.internal_buffer[:nbytes]
self.internal_buffer = self.internal_buffer[nbytes:]
return togive
def read_from_device(self):
self.before_serial_hook()
with serial.Serial(self.serial_device_name) as conn:
if self.log_file_offset == -1:
self.set_offset_to_last_reset(conn)
print("READING FROM DEVICE")
command_to_send = f"hcat P21 {self.log_file_offset}\n\r".encode()
hex_data = self.interact_command(conn, command_to_send)
# if len(hex_data) < 20:
# print(f"small data: {hex_data!r}")
if hex_data == "" or hex_data.isspace(): # only \n\r
# we have caught up with live data, need to sleep for a bit
self.sleep_callback()
hex_data = self.interact_command(conn, command_to_send)
result = bytes.fromhex(hex_data)
self.log_file_offset += len(result)
return result
def set_offset_to_last_reset(self, conn):
"""sets the current tracking offset to the last reset found"""
data = bytes.fromhex(self.interact_command(conn,b"hcat P21_OFF\n\r"))
# last reset is just the last 4 bytes
assert len(data)%4 == 0, "length of P21_OFF is not a multiple of 32 bits"
[last_reset_offset] = struct.unpack("I",data[-4:])
self.log_file_offset = last_reset_offset
@staticmethod
def interact_command(conn, command):
conn.write(command)
data_we_just_sent_and_want_to_ignore = conn.read_until(b"\n\r")
if command != data_we_just_sent_and_want_to_ignore:
import warnings; warnings.warn(f"sent: {command!r} but saw back {data_we_just_sent_and_want_to_ignore!r}")
hex_data = conn.read_until(b"> ")
return hex_data.decode().rpartition("> ")[0]
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_file", nargs="?",default="/dev/ttyACM0")
ns = parser.parse_args()
for [type, *fields] in log_parser.parse_data(log_parser.parse_raw_entries(LiveLogFile(ns.input_file))):
if type != 4:
continue # ignore all but IMU data
print(*map("{:>8}".format, fields), sep=",")
| 46.460526
| 118
| 0.664118
| 490
| 3,531
| 4.473469
| 0.328571
| 0.028741
| 0.065693
| 0.038777
| 0.230383
| 0.212135
| 0.152828
| 0.152828
| 0.07208
| 0
| 0
| 0.008576
| 0.240442
| 3,531
| 75
| 119
| 47.08
| 0.808725
| 0.10422
| 0
| 0.063492
| 0
| 0
| 0.099873
| 0.020992
| 0
| 0
| 0
| 0.013333
| 0.015873
| 1
| 0.079365
| false
| 0
| 0.15873
| 0
| 0.301587
| 0.031746
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4bcf6cd36f0a7b205b865ac0d3e32dffbb450890
| 1,114
|
py
|
Python
|
Modules/ZoneAnalysis_FULL.py
|
amaurijp/BioSPA
|
fd10b58c5a6aa444f34690a98cd939dd5111c4d5
|
[
"MIT"
] | 3
|
2019-10-29T17:26:24.000Z
|
2021-01-08T22:15:17.000Z
|
Modules/ZoneAnalysis_FULL.py
|
amaurijp/BioSPA
|
fd10b58c5a6aa444f34690a98cd939dd5111c4d5
|
[
"MIT"
] | null | null | null |
Modules/ZoneAnalysis_FULL.py
|
amaurijp/BioSPA
|
fd10b58c5a6aa444f34690a98cd939dd5111c4d5
|
[
"MIT"
] | null | null | null |
def ZoneAnalysis_FULL(SettingsDic):
import MODZoneAnalysis
import os
diretorio=os.getcwd()
# Analysis of the regions containing bacteria
MODZoneAnalysis.DetVolume(diretorio,
importstackRootName=SettingsDic['FolderName'],
FirstStack=1,LastStack=SettingsDic['timePoints'],
FirstSlice=1,LastSlice=SettingsDic['SliceNumber'],
TXTExportDir='/ExportedData/VolumeValues',
importformat=SettingsDic['imageFormat'],
RegionAnalysis=False)
'''
# Analysis of the regions containing EPS
MODZoneAnalysis.DetVolume(diretorio,
importstackRootName='/BinarizedCorr/EPS_THR4',
FirstStack=1,LastStack=18,
FirstSlice=1,LastSlice=123,
TXTExportDir='/VolumeValues/EPS_THR4',
importformat='.png',
RegionAnalysis=False)
'''
| 37.133333
| 80
| 0.502693
| 68
| 1,114
| 8.191176
| 0.544118
| 0.039497
| 0.064632
| 0.075404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017081
| 0.421903
| 1,114
| 30
| 81
| 37.133333
| 0.847826
| 0.030521
| 0
| 0
| 0
| 0
| 0.109325
| 0.041801
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.363636
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
4bcf87fdcdb2f4bd16f622a1e7e79b1aeb825b7c
| 3,448
|
py
|
Python
|
server/Kusa/views.py
|
meshellchoo/senior-design-project-kusa
|
829575259c31a620c895a0f2d5654ea099298eb6
|
[
"MIT"
] | 1
|
2022-03-28T23:20:09.000Z
|
2022-03-28T23:20:09.000Z
|
server/Kusa/views.py
|
meshellchoo/senior-design-project-kusa
|
829575259c31a620c895a0f2d5654ea099298eb6
|
[
"MIT"
] | null | null | null |
server/Kusa/views.py
|
meshellchoo/senior-design-project-kusa
|
829575259c31a620c895a0f2d5654ea099298eb6
|
[
"MIT"
] | 2
|
2022-03-24T07:17:27.000Z
|
2022-03-28T23:20:18.000Z
|
from django.http import HttpResponse
from django.http.response import JsonResponse
from django.shortcuts import render
from rest_framework.serializers import Serializer
from admin import settings
import requests
from rest_framework import viewsets
from time import gmtime, strftime
from Kusa.models import SteamUser
from django.views.decorators.csrf import csrf_exempt
from bson import ObjectId
import json
from smtplib import SMTPException
from django.http import BadHeaderError
from django.shortcuts import redirect
from admin.settings import FRONTEND_URL
from Kusa.authentication import get_token
from Kusa.authentication import validate_token
from collections import OrderedDict # keep this line for get_user_daily_hours
from datetime import datetime
from django.core.mail import send_mail
from Kusa.data_collection import get_steam_user
JWT_SECRET_KEY = settings.JWT_SECRET_KEY
conf = settings.CONF
@csrf_exempt
def add_post(request):
friendList = request.POST.get("FriendList").split(",")
friendRequest = request.POST.get("FriendRequest").split(",")
dummy=SteamUser(Name=request.POST.get("Name"),SteamID = request.POST.get("SteamID"),FriendList=friendList,FriendRequest=friendRequest)
dummy.save()
return HttpResponse("Inserted")
def close_view(request):
response = redirect(FRONTEND_URL + '/steamauth')
token = get_token(request)
response.set_cookie('token', (token), max_age=1000)
return response
def get_user_daily_hours(request):
"""
will return an array of the user's daily hours
Parameters: request
Returns: returns a list of json obj -> [{"date" : date1, "hours" : num_hours1},{"date" : date2, "hours" : num_hours2}]
"""
response = validate_token(request)
if "steamid" in response:
user = get_steam_user(response["steamid"])
daily_hours = user['daily_hours']
list_of_json = [dict(day) for day in eval(daily_hours)]
return JsonResponse(list_of_json, safe=False)
else:
return response
def get_user_achievements(request):
"""
Returns: returns a list of json obj -> [{"id": 1, "progress": 0, "date_achieved": "N/A"}, ..., {"id": 10, "progress": 20, "date_achieved": "03/10/2022"}]
"""
response = validate_token(request)
if "steamid" in response:
user = get_steam_user(response["steamid"])
achievements = user['achievements']
list_of_json = [dict(a) for a in eval(achievements)]
return JsonResponse(list_of_json , safe=False)
else:
return response
def send_user_email(steam_id):
success = False
user = get_steam_user(steam_id)
daily_hours = user['daily_hours']
goal = user['goal']
list_of_json = [dict(day) for day in eval(daily_hours)]
sum = 0
for value in list_of_json:
if(datetime.today().isocalendar()[1] == datetime.strptime(value['date'], "%m/%d/%Y").isocalendar()[1]):
sum += value['hours']
if(sum > goal):
try:
send_mail("Kusa Playtime Exceeded", 'You exceeded your goal for this week! Better luck next time. Remember, you can change your goal in the Kusa app.', settings.EMAIL_HOST_USER, [user['email']], fail_silently=False)
success = True
except SMTPException as e:
print(e)
return JsonResponse({'success': success}, safe=False)
| 36.680851
| 231
| 0.706787
| 454
| 3,448
| 5.220264
| 0.328194
| 0.037975
| 0.033755
| 0.020253
| 0.265401
| 0.232068
| 0.232068
| 0.232068
| 0.202532
| 0.202532
| 0
| 0.008948
| 0.189675
| 3,448
| 93
| 232
| 37.075269
| 0.839299
| 0.111079
| 0
| 0.287671
| 0
| 0.013699
| 0.095175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068493
| false
| 0
| 0.328767
| 0
| 0.493151
| 0.013699
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
4bdf2c801d395b3543ef88d753e14f32dd4a9b4a
| 362
|
py
|
Python
|
Activation Function/Softmax/softmax_cpp/test.py
|
kaka-lin/ML-Notes
|
047b88d59346b2ec719b1b3e2fcd605e1ccfaf91
|
[
"MIT"
] | null | null | null |
Activation Function/Softmax/softmax_cpp/test.py
|
kaka-lin/ML-Notes
|
047b88d59346b2ec719b1b3e2fcd605e1ccfaf91
|
[
"MIT"
] | null | null | null |
Activation Function/Softmax/softmax_cpp/test.py
|
kaka-lin/ML-Notes
|
047b88d59346b2ec719b1b3e2fcd605e1ccfaf91
|
[
"MIT"
] | null | null | null |
import numpy as np
from scipy.special import softmax
np.set_printoptions(precision=6)
def k_softmax(x):
exp = np.exp(x)
# keepdims=True keeps the sum broadcastable for batches with more than one row.
return exp / np.sum(exp, axis=1, keepdims=True)
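def k_softmax_stable(x):
    # Illustrative variant added alongside the original (not part of the source
    # script): subtracting the per-row maximum before exponentiating avoids
    # overflow for large inputs and yields the same probabilities as k_softmax.
    shifted = x - np.max(x, axis=1, keepdims=True)
    exp = np.exp(shifted)
    return exp / np.sum(exp, axis=1, keepdims=True)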
if __name__ == "__main__":
x = np.array([[1, 4.2, 0.6, 1.23, 4.3, 1.2, 2.5]])
print("Input Array: ", x)
print("Softmax Array: ", k_softmax(x))
print("Softmax Array: ", softmax(x))
| 21.294118
| 54
| 0.618785
| 63
| 362
| 3.380952
| 0.507937
| 0.112676
| 0.084507
| 0.169014
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055363
| 0.201657
| 362
| 16
| 55
| 22.625
| 0.681661
| 0
| 0
| 0
| 0
| 0
| 0.140884
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.181818
| 0
| 0.363636
| 0.363636
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4bfe5926292aa222488a49dbf22dd03f8782815e
| 1,405
|
py
|
Python
|
exercises/pt/test_01_11.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085
|
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/pt/test_01_11.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79
|
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/pt/test_01_11.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361
|
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
def test():
import spacy.matcher
assert isinstance(
matcher, spacy.matcher.Matcher
), "Você está inicializando o Comparador corretamente?"
assert (
"Matcher(nlp.vocab)" in __solution__
), "Você está inicializando o Comparador corretamente com o vocabulário compartilhado?"
assert (
len(pattern) == 2
), "A expressão deve descrever dois tokens (dois dicionários)."
assert isinstance(pattern[0], dict) and isinstance(
pattern[1], dict
), "Cada item da expressão deve conter um dicionário."
assert (
len(pattern[0]) == 1 and len(pattern[1]) == 1
), "Cada item na expressão deve conter apenas uma chave."
assert any(
pattern[0].get(key) == "iPhone" for key in ["text", "TEXT"]
), "Você está fazendo a comparação com o texto do token?"
assert any(
pattern[1].get(key) == "X" for key in ["text", "TEXT"]
), "Você está fazendo a comparação com o texto do token?"
assert (
'matcher.add("IPHONE_X_PATTERN"' in __solution__
), "Você está adicionando a expressão corretamente?"
assert (
"matches = matcher(doc)" in __solution__
), "Você está chamando o Comparador passando o doc como parâmetro?"
__msg__.good(
"Parabéns! Você identificou uma correspondência com sucesso: dois tokens "
"em doc[1:3] que correspondem a partição 'iPhone X'. "
)
| 39.027778
| 91
| 0.646263
| 177
| 1,405
| 5.028249
| 0.423729
| 0.053933
| 0.047191
| 0.060674
| 0.242697
| 0.242697
| 0.14382
| 0.14382
| 0.14382
| 0.14382
| 0
| 0.010397
| 0.246975
| 1,405
| 35
| 92
| 40.142857
| 0.830813
| 0
| 0
| 0.272727
| 0
| 0
| 0.513167
| 0.021352
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.030303
| true
| 0.030303
| 0.030303
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef015b72b0d9f9a36582b5d4563b3165aa3bb897
| 1,206
|
py
|
Python
|
tests/test_utils.py
|
yiannisha/dbmanage
|
9e1e36e2b59e7e369595f4804bef2c2a7ec0ec56
|
[
"Apache-2.0"
] | null | null | null |
tests/test_utils.py
|
yiannisha/dbmanage
|
9e1e36e2b59e7e369595f4804bef2c2a7ec0ec56
|
[
"Apache-2.0"
] | 10
|
2021-11-06T18:12:54.000Z
|
2021-12-01T18:49:29.000Z
|
tests/test_utils.py
|
yiannisha/dbmanage
|
9e1e36e2b59e7e369595f4804bef2c2a7ec0ec56
|
[
"Apache-2.0"
] | null | null | null |
""" Utilities for testing """
import os
import json
TESTDATADIR = os.path.join(os.path.dirname(__file__), 'testdata')
def get_pass(pass_name : str) -> str:
""" Returns pass from test_credentials.json """
creds_path = os.path.join(os.path.dirname(__file__), 'test_credentials.json')
with open(creds_path, 'r', encoding='utf-8') as f:
for line in f.readlines():
creds = json.loads(line)
return creds[pass_name]
def read_temp_file(filename: str, delete = True, stdout: str = '', stderr: str = '') -> str:
""" Reads temp file and returns contents """
# wait for file to be generated
print(f'Waiting for {filename} file...')
try:
while(not os.path.exists(filename)):
pass
except KeyboardInterrupt as e:
error_msg = f'Stdout: {stdout}\nStderr: {stderr}\n'
raise Exception(error_msg)
# read file
with open(filename, 'r', encoding='utf-8') as f:
out_str = ''.join([line for line in f.readlines()])
# delete file
if delete and os.path.exists(filename):
try:
os.remove(filename)
except:
print(f'{filename} file already removed')
return out_str
| 28.046512
| 93
| 0.619403
| 162
| 1,206
| 4.481481
| 0.425926
| 0.049587
| 0.027548
| 0.033058
| 0.170799
| 0.118457
| 0.07438
| 0
| 0
| 0
| 0
| 0.002203
| 0.247098
| 1,206
| 42
| 94
| 28.714286
| 0.797357
| 0.127695
| 0
| 0.08
| 0
| 0
| 0.133981
| 0.020388
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0.12
| 0.08
| 0
| 0.24
| 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ef0b1e90a414cd10b99ab947636c1ca2151cab55
| 430
|
py
|
Python
|
flatlist/__init__.py
|
dwabece/flatlist
|
61b6f7f70bf9db2bf14f8bfdebce2c4f9a95811f
|
[
"WTFPL"
] | null | null | null |
flatlist/__init__.py
|
dwabece/flatlist
|
61b6f7f70bf9db2bf14f8bfdebce2c4f9a95811f
|
[
"WTFPL"
] | null | null | null |
flatlist/__init__.py
|
dwabece/flatlist
|
61b6f7f70bf9db2bf14f8bfdebce2c4f9a95811f
|
[
"WTFPL"
] | null | null | null |
__version__ = '0.0.1'
def flatten_list(input_list):
"""
Flattens list with many nested lists.
>>> flatten_list([1, [2, [3], [4]]])
[1, 2, 3, 4]
"""
result = []
for item in input_list:
if isinstance(item, list):
result.extend(flatten_list(item))
# yield from flatten_list(item)
else:
result.append(item)
# yield item
return result
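def iflatten_list(input_list):
    """
    Generator variant added for illustration (not part of the original module);
    it follows the commented-out ``yield`` hints in flatten_list above.
    >>> list(iflatten_list([1, [2, [3], [4]]]))
    [1, 2, 3, 4]
    """
    for item in input_list:
        if isinstance(item, list):
            yield from iflatten_list(item)
        else:
            yield item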
| 20.47619
| 45
| 0.532558
| 53
| 430
| 4.132075
| 0.509434
| 0.200913
| 0.027397
| 0.03653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.334884
| 430
| 20
| 46
| 21.5
| 0.727273
| 0.302326
| 0
| 0
| 0
| 0
| 0.018182
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef0dbb4129bccb5de4e10f51b60990b9ac3393bb
| 607
|
py
|
Python
|
slackcast/token.py
|
rbdixon/slackcast
|
ac4ac4591bbcf62d64ec05b5479e6e8315f92a69
|
[
"MIT"
] | null | null | null |
slackcast/token.py
|
rbdixon/slackcast
|
ac4ac4591bbcf62d64ec05b5479e6e8315f92a69
|
[
"MIT"
] | 1
|
2021-11-15T17:47:27.000Z
|
2021-11-15T17:47:27.000Z
|
slackcast/token.py
|
rbdixon/slackcast
|
ac4ac4591bbcf62d64ec05b5479e6e8315f92a69
|
[
"MIT"
] | null | null | null |
import os
import keyring
from prompt_toolkit import prompt
KEY = ('slackcast', 'token')
SLACKCAST_INSTALL_URL = os.environ.get(
'SLACKCAST_INSTALL_URL', 'https://slackcast.devtestit.com/install'
)
def get_token():
# For testing
token = os.environ.get('SLACKCAST_TOKEN', None)
if token is None:
token = keyring.get_password(*KEY)
if token is None:
raw_token = prompt(f'Visit {SLACKCAST_INSTALL_URL}, approve, and enter token: ')
if raw_token.startswith('xoxp-'):
token = raw_token
keyring.set_password(*KEY, token)
return token
| 22.481481
| 88
| 0.667216
| 78
| 607
| 5.012821
| 0.423077
| 0.122762
| 0.14578
| 0.107417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224053
| 607
| 26
| 89
| 23.346154
| 0.830149
| 0.018122
| 0
| 0.117647
| 0
| 0
| 0.254637
| 0.075885
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0.117647
| 0.176471
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ef0f41777334766f27b085f4b278863d8beee416
| 790
|
py
|
Python
|
baidupan.py
|
iSteveyang/GraduateDesign-pyqt
|
ce4e6c8b0de2398081a83c63fb98cc03126bc6d0
|
[
"MIT"
] | null | null | null |
baidupan.py
|
iSteveyang/GraduateDesign-pyqt
|
ce4e6c8b0de2398081a83c63fb98cc03126bc6d0
|
[
"MIT"
] | null | null | null |
baidupan.py
|
iSteveyang/GraduateDesign-pyqt
|
ce4e6c8b0de2398081a83c63fb98cc03126bc6d0
|
[
"MIT"
] | null | null | null |
import progressbar
from baidupcsapi import PCS
class ProgressBar():
def __init__(self):
self.first_call = True
def __call__(self, *args, **kwargs):
if self.first_call:
self.widgets = [progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker('>')),
' ', progressbar.ETA()]
self.pbar = progressbar.ProgressBar(widgets=self.widgets, maxval=kwargs['size']).start()
self.first_call = False
if kwargs['size'] <= kwargs['progress']:
self.pbar.finish()
else:
self.pbar.update(kwargs['progress'])
pcs = PCS('username','password')
test_file = open('bigfile.pdf','rb').read()
ret = pcs.upload('/',test_file,'bigfile.pdf',callback=ProgressBar())
| 34.347826
| 115
| 0.611392
| 84
| 790
| 5.595238
| 0.5
| 0.057447
| 0.082979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231646
| 790
| 22
| 116
| 35.909091
| 0.7743
| 0
| 0
| 0
| 0
| 0
| 0.086076
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0.055556
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ef1bad6bf6953bfcc6d21e0a6fe6026bfa17d421
| 286
|
py
|
Python
|
desafio64.py
|
DantonMatheus/desafios-python
|
709a3f1774596fc536dd4b882c78a6b951c92a9c
|
[
"MIT"
] | null | null | null |
desafio64.py
|
DantonMatheus/desafios-python
|
709a3f1774596fc536dd4b882c78a6b951c92a9c
|
[
"MIT"
] | null | null | null |
desafio64.py
|
DantonMatheus/desafios-python
|
709a3f1774596fc536dd4b882c78a6b951c92a9c
|
[
"MIT"
] | null | null | null |
print('===== DESAFIO 64 =====')
num = 0
cont = 0
soma = 0
num = int(input('Digite um número [999 para SAIR]: '))
while num != 999:
soma += num
cont += 1
num = int(input('Digite um número [999 para SAIR]: '))
print(f'Você digitou {cont} números! A soma entre eles é {soma}')
| 26
| 65
| 0.594406
| 46
| 286
| 3.695652
| 0.543478
| 0.070588
| 0.129412
| 0.2
| 0.423529
| 0.423529
| 0.423529
| 0.423529
| 0.423529
| 0
| 0
| 0.067265
| 0.22028
| 286
| 10
| 66
| 28.6
| 0.695067
| 0
| 0
| 0.2
| 0
| 0
| 0.506993
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef1c14040a2c37814d24485011b2191f84d572dc
| 325
|
py
|
Python
|
pytify/strategy.py
|
EngineeringIsLife/Pytify
|
ae9a351144cb8f5556740d33cdf29073ffd2dc1e
|
[
"MIT"
] | null | null | null |
pytify/strategy.py
|
EngineeringIsLife/Pytify
|
ae9a351144cb8f5556740d33cdf29073ffd2dc1e
|
[
"MIT"
] | null | null | null |
pytify/strategy.py
|
EngineeringIsLife/Pytify
|
ae9a351144cb8f5556740d33cdf29073ffd2dc1e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from sys import platform
def get_pytify_class_by_platform():
if 'linux' in platform:
from linux import Linux
return Linux
elif 'darwin' in platform:
from darwin import Darwin
return Darwin
else:
raise Exception('%s is not supported.' % platform)
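# Illustrative usage sketch (not part of the original file): callers would
# typically instantiate whatever class is returned, e.g.:
#
#     pytify_class = get_pytify_class_by_platform()
#     pytify = pytify_class()
#
# Constructor arguments depend on the Linux/Darwin implementations, which are
# not shown here, so the zero-argument call is an assumption.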
| 21.666667
| 58
| 0.630769
| 41
| 325
| 4.902439
| 0.609756
| 0.099502
| 0.139303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004329
| 0.289231
| 325
| 14
| 59
| 23.214286
| 0.865801
| 0.064615
| 0
| 0
| 0
| 0
| 0.102649
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| true
| 0
| 0.3
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef20178603cd20e2dd144ff595f24f1bbc671045
| 282
|
py
|
Python
|
django_mediamosa/templatetags/mediamosa_extras.py
|
UGentPortaal/django-mediamosa
|
553a725cd02e8dd2489bf25a613c9b98155cf90d
|
[
"BSD-3-Clause"
] | null | null | null |
django_mediamosa/templatetags/mediamosa_extras.py
|
UGentPortaal/django-mediamosa
|
553a725cd02e8dd2489bf25a613c9b98155cf90d
|
[
"BSD-3-Clause"
] | null | null | null |
django_mediamosa/templatetags/mediamosa_extras.py
|
UGentPortaal/django-mediamosa
|
553a725cd02e8dd2489bf25a613c9b98155cf90d
|
[
"BSD-3-Clause"
] | null | null | null |
from django import template
register = template.Library()
@register.filter
def mimetype(value, mime_type):
mediafiles = []
for mediafile in value:
if mediafile.metadata.get('mime_type') == mime_type:
mediafiles.append(mediafile)
return mediafiles
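# Illustrative usage sketch (not part of the original file): after loading the
# tag library, the filter can be applied in a template, e.g.:
#
#     {% load mediamosa_extras %}
#     {% for mediafile in asset.mediafiles|mimetype:"video/mp4" %}
#         ...
#     {% endfor %}
#
# ``asset.mediafiles`` is a hypothetical iterable of mediafile objects.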
| 21.692308
| 60
| 0.695035
| 32
| 282
| 6.03125
| 0.65625
| 0.124352
| 0.186529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212766
| 282
| 12
| 61
| 23.5
| 0.869369
| 0
| 0
| 0
| 0
| 0
| 0.031915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef25c53ea4c0fb58041ed1cd6cded53b4e340d23
| 10,942
|
py
|
Python
|
v0/aia_eis_v0/ml_sl/rf/dt_main.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | 1
|
2022-03-02T12:57:19.000Z
|
2022-03-02T12:57:19.000Z
|
v0/aia_eis_v0/ml_sl/rf/dt_main.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
v0/aia_eis_v0/ml_sl/rf/dt_main.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
import copy
from utils.file_utils.dataset_reader_pack.ml_dataset_reader import get_TV_T_dataset, get_T_V_T_dataset
from ml_sl.rf.dt_0 import Node, save_node, load_node
from ml_sl.ml_data_wrapper import pack_list_2_list, single_point_list_2_list, reform_labeled_dataset_list
from ml_sl.ml_data_wrapper import split_labeled_dataset_list
from utils.file_utils.filename_utils import get_date_prefix
from ml_sl.ml_critrions import cal_accuracy, cal_kappa, cal_accuracy_on_2, cal_accuracy_on_3
label_list = [2, 4, 5, 6, 7, 8, 9]
# Import dataset (Training, validation, Test)
ml_dataset_pickle_file_path = '../../datasets/ml_datasets/normed'
tr_dataset, va_dataset, te_dataset = get_T_V_T_dataset(file_path=ml_dataset_pickle_file_path)
tr_va_dataset, test_dataset = get_TV_T_dataset(file_path=ml_dataset_pickle_file_path)
tr_label_list, tr_data_list = split_labeled_dataset_list(tr_dataset)
va_label_list, va_data_list = split_labeled_dataset_list(va_dataset)
tr_va_label_list, tr_va_data_list = split_labeled_dataset_list(tr_va_dataset)
te_label_list, te_data_list = split_labeled_dataset_list(te_dataset)
# --------------------- 1-No Pruning ---------------------
def dt_no_pruning(training_dataset, validation_dataset, test_dataset, label_list=[2,4,5,6,7,8,9]):
tr_va_dataset = training_dataset + validation_dataset
reformed_tr_va_dataset = reform_labeled_dataset_list(tr_va_dataset)
# 1.1- Use [training+validation]-dataset to train a Decision Tree (DT), DT0,
dt = Node(reformed_labeled_dataset_list = reformed_tr_va_dataset, level = 0)
dt.create_child_node()
# 1.2- save DT
dt_file_name = get_date_prefix() + 'dt_no_pruning_pickle.file'
save_node(node=dt, file_name=dt_file_name)
# 1.3- Test the performance(accuracy, kappa) of DT0 on test-dataset
test_label_list = [t[0] for t in test_dataset]
sample_label_prob_dict_list = []
empty_sample_label_prob_dict = {}
for label in label_list:
empty_sample_label_prob_dict[label] = 0.0
for t_d in test_dataset:
t_d = single_point_list_2_list(t_d[1])
pre = dt.classify(unlabeled_data_list=t_d)
sample_label_prob_dict = copy.deepcopy(empty_sample_label_prob_dict)
sample_label_prob_dict[pre] += 1
sample_label_prob_dict_list.append(sample_label_prob_dict)
acc = cal_accuracy(sample_label_prob_dict_list, test_label_list)
kappa = cal_kappa(sample_label_prob_dict_list, test_label_list)
return acc, kappa
#------------- Train on tr, tested on va #-------------
# acc,kappa = dt_no_pruning(training_dataset=tr_dataset, validation_dataset=[], test_dataset=tr_dataset)
# print(acc,kappa) # --> 1.0 1.0
#------------- Train on tr, tested on va #-------------
# if __name__ == '__main__':
# training_dataset, validation_dataset, test_dataset = get_T_V_T_dataset(file_path='../../datasets/ml_datasets/normed')
# Running condition-1
# acc, kappa = dt_no_pruning(training_dataset, validation_dataset, test_dataset)
# print('Accuracy: {0}, Kappa: {1}'.format(acc, kappa))
# Running condition-2
# acc, kappa = dt_no_pruning(training_dataset, validation_dataset=[], test_dataset=validation_dataset)
# print('Accuracy: {0}, Kappa: {1}'.format(acc, kappa))
"""
Running condition-1
Train on [Training+validation]-dataset
Test on test-dataset
1-Accuracy: 0.45054945054945056, Kappa: 0.3173293323330833
2-Accuracy: 0.45054945054945056, Kappa: 0.3173293323330833
Running condition-2
Train on [Training]-dataset
Test on validation-dataset
1-Accuracy: 0.5319148936170213, Kappa: 0.42762247439800716
2-Accuracy: 0.5319148936170213, Kappa: 0.42762247439800716
"""
def load_dt_no_pruning(training_dataset, validation_dataset, test_dataset, label_list=[2,4,5,6,7,8,9]):
dt = load_node(file_name='2020_04_11_dt_no_pruning_pickle.file', file_path='dt_res')
# 1.3- Test the performance(accuracy, kappa) of DT0 on test-dataset
test_label_list = [t[0] for t in test_dataset]
sample_label_prob_dict_list = []
empty_sample_label_prob_dict = {}
for label in label_list:
empty_sample_label_prob_dict[label] = 0.0
for t_d in test_dataset:
t_d = single_point_list_2_list(t_d[1])
pre = dt.classify(unlabeled_data_list=t_d)
sample_label_prob_dict = copy.deepcopy(empty_sample_label_prob_dict)
sample_label_prob_dict[pre] += 1
sample_label_prob_dict_list.append(sample_label_prob_dict)
acc = cal_accuracy(sample_label_prob_dict_list, test_label_list)
acc_on_2 = cal_accuracy_on_2(sample_label_prob_dict_list, test_label_list)
acc_on_3 = cal_accuracy_on_3(sample_label_prob_dict_list, test_label_list)
kappa = cal_kappa(sample_label_prob_dict_list, test_label_list)
print('Decision Tree with no pruning: Accuracy on 1 = {0}, Accuracy on 2 = {1}, Accuracy on 3 = {2}, Kappa={3}'.format(
acc, acc_on_2, acc_on_3, kappa))
# training_dataset, validation_dataset, test_dataset = get_T_V_T_dataset(file_path='../../datasets/ml_datasets/normed')
# load_dt_no_pruning(training_dataset, validation_dataset, test_dataset, label_list=[2,4,5,6,7,8,9])
# Decision Tree with no pruning: Accuracy on 1 = 0.4945054945054945, Accuracy on 2 = 0.5164835164835165,
# Accuracy on 3 = 0.6923076923076923, Kappa=0.3706209592542475
# --------------------- 1-No Pruning ---------------------
"""
EA-Revise
Used to produce the results required for the EA revision. In the grid-search (GS) stage the DT only has [no pruning / posterior pruning]; the DT's GS results are not reported in the paper, so only the final result needs to be computed.
The DT's final config is no pruning, trained on tr+va and tested on te.
"""
def dtFinalRes():
# load dt model
dt = load_node(file_name='2020_04_11_dt_no_pruning_pickle.file', file_path='dt_res')
# Test the performance(accuracy, kappa) of DT-final on TrVa-dataset
trVaSample_label_prob_dict_list = []
teSample_label_prob_dict_list = []
empty_sample_label_prob_dict = {}
for label in label_list:
empty_sample_label_prob_dict[label] = 0.0
# tested on trVa-dataset
for t_d in tr_va_dataset:
t_d = single_point_list_2_list(t_d[1])
pre = dt.classify(unlabeled_data_list=t_d)
sample_label_prob_dict = copy.deepcopy(empty_sample_label_prob_dict)
sample_label_prob_dict[pre] += 1
trVaSample_label_prob_dict_list.append(sample_label_prob_dict)
# tested on te-dataset
for t_d in test_dataset:
t_d = single_point_list_2_list(t_d[1])
pre = dt.classify(unlabeled_data_list=t_d)
sample_label_prob_dict = copy.deepcopy(empty_sample_label_prob_dict)
sample_label_prob_dict[pre] += 1
teSample_label_prob_dict_list.append(sample_label_prob_dict)
trVaAcc = cal_accuracy(trVaSample_label_prob_dict_list, tr_va_label_list)
trVaKappa = cal_kappa(trVaSample_label_prob_dict_list, tr_va_label_list)
teAcc = cal_accuracy(teSample_label_prob_dict_list, te_label_list)
teKappa = cal_kappa(teSample_label_prob_dict_list, te_label_list)
print('Final res: trVaAcc={0}, trVaKappa={1},trVaAK={2},teAcc={3},teKappa={4},teAK={5}'.format(
trVaAcc, trVaKappa, trVaAcc+trVaKappa,
teAcc,teKappa,teAcc+teKappa
))
# dtFinalRes()
"""
node = pickle.load(file) ModuleNotFoundError: No module named 'ml_sl'
Final res:
trVaAcc=0.9163568773234201, trVaKappa=0.897055384288296, trVaAK=1.813412261611716,
teAcc=0.4945054945054945, teKappa=0.3706209592542475, teAK=0.8651264537597421
"""
# --------------------- 2-Pruning ---------------------
def dt_pruning(training_dataset, validation_dataset, test_dataset, label_list=[2,4,5,6,7,8,9]):
reformed_tr_dataset_list = reform_labeled_dataset_list(training_dataset)
# 2.1- Use training-dataset to train a Decision Tree, DT
dt = Node(reformed_labeled_dataset_list=reformed_tr_dataset_list, level=0)
dt.create_child_node()
# 2.2- Use validation-dataset to prune DT1
dt.post_pruning_1(reform_labeled_dataset_list(validation_dataset))
# 2.3- save model
dt_file_name = get_date_prefix() + 'dt_pruning_pickle.file'
save_node(node=dt, file_name=dt_file_name)
# 2.4- Test the performance(accuracy, kappa) of DT on test-dataset
test_label_list = [t[0] for t in test_dataset]
sample_label_prob_dict_list = []
empty_sample_label_prob_dict = {}
for label in label_list:
empty_sample_label_prob_dict[label] = 0.0
for t_d in test_dataset:
t_d = single_point_list_2_list(t_d[1])
pre = dt.classify(unlabeled_data_list=t_d)
sample_label_prob_dict = copy.deepcopy(empty_sample_label_prob_dict)
sample_label_prob_dict[pre] += 1
sample_label_prob_dict_list.append(sample_label_prob_dict)
acc = cal_accuracy(sample_label_prob_dict_list, test_label_list)
kappa = cal_kappa(sample_label_prob_dict_list, test_label_list)
return acc, kappa
# if __name__ == '__main__':
# training_dataset, validation_dataset, test_dataset = get_T_V_T_dataset(file_path='../../datasets/ml_datasets/normed')
# acc, kappa = dt_pruning(training_dataset, validation_dataset, test_dataset, label_list=[2, 4, 5, 6, 7, 8, 9])
# print('Accuracy: {0}, Kappa: {1}'.format(acc, kappa))
"""
1- Accuracy: 0.4835164835164835, Kappa: 0.3591549295774648
2- Accuracy: 0.4835164835164835, Kappa: 0.3591549295774648
"""
def load_dt_pruning(test_dataset, label_list=[2,4,5,6,7,8,9]):
dt = load_node(file_name='2020_04_11_dt_pruning_pickle_1.file', file_path='dt_res')
# 2.4- Test the performance(accuracy, kappa) of DT on test-dataset
test_label_list = [t[0] for t in test_dataset]
sample_label_prob_dict_list = []
empty_sample_label_prob_dict = {}
for label in label_list:
empty_sample_label_prob_dict[label] = 0.0
for t_d in test_dataset:
t_d = single_point_list_2_list(t_d[1])
pre = dt.classify(unlabeled_data_list=t_d)
sample_label_prob_dict = copy.deepcopy(empty_sample_label_prob_dict)
sample_label_prob_dict[pre] += 1
sample_label_prob_dict_list.append(sample_label_prob_dict)
acc = cal_accuracy(sample_label_prob_dict_list, test_label_list)
acc_on_2 = cal_accuracy_on_2(sample_label_prob_dict_list, test_label_list)
acc_on_3 = cal_accuracy_on_3(sample_label_prob_dict_list, test_label_list)
kappa = cal_kappa(sample_label_prob_dict_list, test_label_list)
print('Decision Tree with pruning: Accuracy on 1 = {0}, Accuracy on 2 = {1}, Accuracy on 3 = {2}, Kappa={3}'.format(
acc, acc_on_2, acc_on_3, kappa))
# training_dataset, validation_dataset, test_dataset = get_T_V_T_dataset(file_path='../../datasets/ml_datasets/normed')
# load_dt_pruning(test_dataset, label_list=[2,4,5,6,7,8,9])
# Decision Tree with pruning: Accuracy on 1 = 0.4835164835164835, Accuracy on 2 = 0.5054945054945055,
# Accuracy on 3 = 0.6703296703296703, Kappa = 0.3591549295774648
# --------------------- 2-Pruning ---------------------
| 46.961373
| 123
| 0.732681
| 1,683
| 10,942
| 4.346405
| 0.088532
| 0.076282
| 0.110185
| 0.14026
| 0.768694
| 0.737116
| 0.669993
| 0.624607
| 0.594395
| 0.566644
| 0
| 0.070117
| 0.154085
| 10,942
| 233
| 124
| 46.961373
| 0.720182
| 0.244928
| 0
| 0.639344
| 0
| 0.02459
| 0.06747
| 0.033527
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040984
| false
| 0
| 0.057377
| 0
| 0.114754
| 0.02459
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef2afd3b3d3cc23390816b111f6a8ec32454a594
| 486
|
py
|
Python
|
setup.py
|
fmaida/caro-diario
|
adc5018f2ef716b49db39aa9189ab1e803fcd357
|
[
"MIT"
] | null | null | null |
setup.py
|
fmaida/caro-diario
|
adc5018f2ef716b49db39aa9189ab1e803fcd357
|
[
"MIT"
] | null | null | null |
setup.py
|
fmaida/caro-diario
|
adc5018f2ef716b49db39aa9189ab1e803fcd357
|
[
"MIT"
] | null | null | null |
from distutils.core import setup
setup(
name = 'caro-diario',
packages = ['caro-diario'], # this must be the same as the name above
version = '0.1',
description = 'Diario',
author = 'Francesco Maida',
author_email = '[email protected]',
url = 'https://github.com/fmaida/caro-diario.git', # use the URL to the github repo
download_url = '', # I'll explain this in a second
keywords = ['diario', 'logging', 'esempio'], # arbitrary keywords
classifiers = [],
)
| 34.714286
| 85
| 0.67284
| 66
| 486
| 4.924242
| 0.69697
| 0.092308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005013
| 0.179012
| 486
| 13
| 86
| 37.384615
| 0.809524
| 0.244856
| 0
| 0
| 0
| 0
| 0.364641
| 0.069061
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef3d7706ee027142a3cc848598e7a4e1a2e3f600
| 1,718
|
py
|
Python
|
utils/storage/redisPSCO/python/storage/storage_object.py
|
TANGO-Project/compss-tango
|
d9e007b6fe4f8337d4f267f95f383d8962602ab8
|
[
"Apache-2.0"
] | 3
|
2018-03-05T14:52:22.000Z
|
2019-02-08T09:58:24.000Z
|
utils/storage/redisPSCO/python/storage/storage_object.py
|
TANGO-Project/compss-tango
|
d9e007b6fe4f8337d4f267f95f383d8962602ab8
|
[
"Apache-2.0"
] | null | null | null |
utils/storage/redisPSCO/python/storage/storage_object.py
|
TANGO-Project/compss-tango
|
d9e007b6fe4f8337d4f267f95f383d8962602ab8
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2017 Barcelona Supercomputing Center (www.bsc.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''Redis Storage Object implementation for the PyCOMPSs Python Binding
@author: srodrig1
'''
import uuid
import storage.api
class storage_object(object):
'''Storage Object
'''
def __init__(self):
'''Constructor method
'''
# Id will be None until persisted
self.pycompss_psco_identifier = None
def makePersistent(self, identifier = None):
'''Stores the object in the Redis database
'''
storage.api.makePersistent(self, identifier)
def make_persistent(self, identifier = None):
'''Support for underscore notation
'''
self.makePersistent(identifier)
def deletePersistent(self):
'''Deletes the object from the Redis database
'''
storage.api.deletePersistent(self)
def delete_persistent(self):
'''Support for underscore notation
'''
self.deletePersistent()
def getID(self):
'''Gets the ID of the object
'''
return self.pycompss_psco_identifier
'''Add support for camelCase
'''
StorageObject = storage_object
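# Illustrative usage sketch (not part of the original file): a user class would
# normally inherit from StorageObject and persist instances explicitly, e.g.:
#
#     class Counter(StorageObject):
#         def __init__(self):
#             super(Counter, self).__init__()
#             self.value = 0
#
#     c = Counter()
#     c.make_persistent()      # stores the object in Redis via storage.api
#     print(c.getID())
#
# ``Counter`` is a hypothetical subclass used only for illustration.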
| 28.163934
| 75
| 0.679278
| 207
| 1,718
| 5.57971
| 0.531401
| 0.051948
| 0.022511
| 0.027706
| 0.100433
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00686
| 0.236321
| 1,718
| 60
| 76
| 28.633333
| 0.873476
| 0.568102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ef4f605e514f18c935ef699c3ca9417a54b457c9
| 2,465
|
py
|
Python
|
apollo/auth.py
|
sorinbiriescu/Apollo_backend
|
b6fb68a26487a138e7efd691e7fdffaa5042a155
|
[
"Apache-2.0"
] | null | null | null |
apollo/auth.py
|
sorinbiriescu/Apollo_backend
|
b6fb68a26487a138e7efd691e7fdffaa5042a155
|
[
"Apache-2.0"
] | null | null | null |
apollo/auth.py
|
sorinbiriescu/Apollo_backend
|
b6fb68a26487a138e7efd691e7fdffaa5042a155
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime, timedelta
from typing import Optional
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from jose import JWTError, jwt
from passlib.context import CryptContext
from apollo.crud import query_first_user
from apollo.main import site_settings
from apollo.schemas import TokenData, UserModel
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="api/token")
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
def verify_password(plain_password, hashed_password):
return pwd_context.verify(plain_password, hashed_password)
def get_password_hash(password):
return pwd_context.hash(password)
def get_user(username: str):
user = query_first_user(username)
if user:
return UserModel.from_orm(user)
def authenticate_user(username: str, password: str):
user = query_first_user(username)
if not user:
return False
if not verify_password(password, user.password):
return False
return user
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(minutes=15)
to_encode.update({"exp": expire})
encoded_jwt = jwt.encode(to_encode, site_settings.SECRET_KEY, algorithm = site_settings.ALGORITHM)
return encoded_jwt
async def get_current_user(token: str = Depends(oauth2_scheme)):
credentials_exception = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Could not validate credentials",
headers={"WWW-Authenticate": "Bearer"},
)
try:
payload = jwt.decode(token, site_settings.SECRET_KEY, algorithms=[site_settings.ALGORITHM])
username: str = payload.get("sub")
if username is None:
raise credentials_exception
token_data = TokenData(username=username)
except JWTError:
raise credentials_exception
user = get_user(username = token_data.username)
if user is None:
raise credentials_exception
return user
async def get_current_active_user(current_user: UserModel = Depends(get_current_user)):
if current_user == 1:
raise HTTPException(status_code=400, detail="Inactive user")
return current_user
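# Illustrative usage sketch (not part of the original file): these dependencies
# would typically be wired into a FastAPI route, e.g.:
#
#     @app.get("/users/me")
#     async def read_users_me(
#         current_user: UserModel = Depends(get_current_active_user),
#     ):
#         return current_user
#
# ``app`` is a hypothetical FastAPI instance; the real one lives elsewhere in
# the project (apollo.main is only imported here for its settings).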
| 30.060976
| 103
| 0.710345
| 291
| 2,465
| 5.810997
| 0.347079
| 0.035482
| 0.024837
| 0.031934
| 0.073329
| 0.036665
| 0.036665
| 0
| 0
| 0
| 0
| 0.006701
| 0.212982
| 2,465
| 81
| 104
| 30.432099
| 0.864948
| 0
| 0
| 0.157895
| 0
| 0
| 0.037783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.087719
| false
| 0.157895
| 0.157895
| 0.035088
| 0.403509
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ef68897796bf15cfbe41f5e79ff37ee0aa7a33e6
| 3,578
|
py
|
Python
|
src/python/DipSimUtilities.py
|
ndeybach/DipSim
|
091f147f933b000b6ab829ec7d10eef985c260b2
|
[
"MIT"
] | null | null | null |
src/python/DipSimUtilities.py
|
ndeybach/DipSim
|
091f147f933b000b6ab829ec7d10eef985c260b2
|
[
"MIT"
] | null | null | null |
src/python/DipSimUtilities.py
|
ndeybach/DipSim
|
091f147f933b000b6ab829ec7d10eef985c260b2
|
[
"MIT"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""
MIT License
Copyright (c) 2020 Nils DEYBACH & Léo OUDART
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""
It serves as a container for various utility functions that can be useful in a multitude of cases.
"""
from math import cos, sin, radians, degrees, acos, atan2, pi
from PySide2.QtCore import QRandomGenerator
from PySide2.QtGui import QVector3D, QColor, QQuaternion
######## NUMBER GENERATION #########
"""
Return randomly -1 or 1 as a random sign generator.
"""
def randomSignGenerator():
rndNum = QRandomGenerator.global_().bounded(0, 2)
if(rndNum == 0):
return -1.0
else: return 1.0
######## ANGLES CONVERTIONS #########
"""
Returns rotated quaternion from a rotation (theta) applied to original
direction around specified axis.
"""
def quaternionfromAxisAndAngle(theta, qvector3D=QVector3D(0, 0, 0)):
provVect = (qvector3D.normalized())
s = sin(radians(theta/2))
directionRot = QVector3D(s*provVect.x(), s*provVect.y(), s*provVect.z())
quat = QQuaternion(cos(radians(theta/2)), directionRot.x(), directionRot.y(), directionRot.z())
return quat
"""
Returns quaternion rotation from spherical position (following physics convention) with
a (1,0,0) orientation initially.
phi, theta: angles in physics convention in degrees.
"""
def anglesSphToQuaternion(phi, theta):
x = sin(radians(theta))*cos(radians(phi))
y = sin(radians(theta))*sin(radians(phi))
z = cos(radians(theta))
fromVec = QVector3D(1, 0, 0)
toVec = QVector3D(x, y, z)
return QQuaternion.rotationTo(fromVec, toVec)
"""
Returns the spherical orientation (following physics convention) corresponding to a quaternion
representing the rotation needed to get a vector to follow that orientation.
"""
def anglesQuaternionToSph(quaternion):
fromVect = QVector3D(1, 0, 0)
toVect = quaternion.rotatedVector(fromVect)
phi = atan2(toVect.y(), toVect.x())
theta = acos(toVect.z()/toVect.length())
return [phi, theta]
######## COLORS #########
def quaternionToColor(quaternion):
sphAngles = anglesQuaternionToSph(quaternion)
return angleSphToColor(sphAngles[0], sphAngles[1])
"""
Returns a color from a pair of spherical angles.
phi, theta: angles in physics convention in radians.
"""
def angleSphToColor(phi, theta):
return QColor.fromHsl(degrees(phi)%360, 255, (degrees(pi - theta)%181)*255/180)
"""
Returns a random color.
"""
def rndColorGenerator():
return QColor(QRandomGenerator.global_().bounded(0, 256), QRandomGenerator.global_().bounded(0, 256), QRandomGenerator.global_().bounded(0, 256))
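# Illustrative usage sketch (not part of the original file): converting a
# spherical orientation to a quaternion and back should recover the angles,
# keeping in mind the degree/radian conventions noted in the docstrings:
#
#     q = anglesSphToQuaternion(phi=45, theta=60)      # degrees in
#     phi_rad, theta_rad = anglesQuaternionToSph(q)    # radians out
#     color = angleSphToColor(phi_rad, theta_rad)      # radians in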
| 35.425743
| 149
| 0.734768
| 486
| 3,578
| 5.401235
| 0.423868
| 0.033524
| 0.04419
| 0.045714
| 0.064381
| 0.064381
| 0.064381
| 0.037714
| 0.037714
| 0.037714
| 0
| 0.023194
| 0.156512
| 3,578
| 100
| 150
| 35.78
| 0.846587
| 0.329793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.205882
| false
| 0
| 0.088235
| 0.058824
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
322854f1b6ad1bef2a63f035b0bf9ea507c22498
| 5,537
|
py
|
Python
|
src/main.py
|
ronikleyton/script-backup-switch-huawei
|
80c990afa3561c350823cb96e25174262d8d4ab1
|
[
"MIT"
] | null | null | null |
src/main.py
|
ronikleyton/script-backup-switch-huawei
|
80c990afa3561c350823cb96e25174262d8d4ab1
|
[
"MIT"
] | null | null | null |
src/main.py
|
ronikleyton/script-backup-switch-huawei
|
80c990afa3561c350823cb96e25174262d8d4ab1
|
[
"MIT"
] | null | null | null |
from telnetlib import Telnet
from exception.exceptions import *
from datetime import date
import time
import os
from dotenv import load_dotenv
import json
load_dotenv()
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
f = open(f'{ROOT_DIR}/equipamentos.json')
equipamentos = json.load(f)['equipamentos']
def main(equipamento):
IP_SERVER_FTP = os.environ.get('IP_SERVER_FTP')
USER_FTP = os.environ.get('USER_FTP')
PASS_FTP = os.environ.get('PASS_FTP')
data_atual = date.today()
data_em_texto ="{}-{}-{}".format(data_atual.day, data_atual.month,data_atual.year)
r = '\r'
r = r.encode('ascii')
try:
equipamento.connection = Telnet(equipamento.ip, equipamento.port)
# Performing login
index, match_obj, text = equipamento.connection.expect(["Username:".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError(f"Falha na conexão, EQUIPAMENTO RESPONSE: {text}")
equipamento.connection.write(f"{equipamento.user}\r".encode('latin-1'))
index, match_obj, text = equipamento.connection.expect(["Password:".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError(f"Falha no usuário, EQUIPAMENTO RESPONSE: {text}")
equipamento.connection.write(f"{equipamento.password}\r".encode('latin-1'))
index, match_obj, text = equipamento.connection.expect([">".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao informar a senha")
equipamento.connection.write(b"save\r")
equipamento.connection.write(b"Y\r")
index, match_obj, text = equipamento.connection.expect([">".encode('latin-1')], timeout=2)
print("Acessou o switch.")
time.sleep(3)
index, match_obj, text = equipamento.connection.expect([">".encode('latin-1')], timeout=2)
ftp = "ftp -a %s %s"%(equipamento.ip,IP_SERVER_FTP)
ftp = ftp.encode('ascii')
equipamento.connection.write(ftp + r)
index, match_obj, text = equipamento.connection.expect([":".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao executar comando de conectar no ftp ")
equipamento.connection.write(USER_FTP.encode('ascii') + r)
index, match_obj, text = equipamento.connection.expect(["password:".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao Acessar o FTP-SERVER verifique a conexão e credenciais")
equipamento.connection.write(PASS_FTP.encode('ascii') + r)
index, match_obj, text = equipamento.connection.expect(["[ftp]".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao Acessar o FTP-SERVER")
equipamento.connection.write(b"binary\r")
index, match_obj, text = equipamento.connection.expect(["[ftp]".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao mudar ftp para binary")
equipamento.connection.write(b"cd backups\r")
index, match_obj, text = equipamento.connection.expect(["[ftp]".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao entrar na pasta Backups")
equipamento.connection.write(b"cd huawei\r")
index, match_obj, text = equipamento.connection.expect(["[ftp]".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao Entrar na pasta huawei")
criarPasta = "mkdir %s"%(equipamento.hostname)
criarPasta = criarPasta.encode('ascii')
equipamento.connection.write(criarPasta + r)
index, match_obj, text = equipamento.connection.expect(["[ftp]".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao Entrar na pasta huawei")
pasta = "cd %s"%(equipamento.hostname)
pasta = pasta.encode('ascii')
equipamento.connection.write(pasta + r)
index, match_obj, text = equipamento.connection.expect(["[ftp]".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao Entrar na pasta do switch")
put = "put vrpcfg.zip vrpcfg-%s.zip"%(data_em_texto)
put = put.encode('ascii')
equipamento.connection.write(put + r)
index, match_obj, text = equipamento.connection.expect(["[ftp]".encode('latin-1')], timeout=2)
if not match_obj:
raise CommandError("Falha ao salvar o arquivo de configuração no servidor.")
time.sleep(1.5)
#print (equipamento.connection.read_eager())
#print (equipamento.connection.read_all())
print('BackupFinalizado')
equipamento.connection.close()
except:
equipamento.connection.close()
raise ConnectionError()
class Equipamento:
def __init__(self,hostname, ip,port, user, password):
self.connection = None
self.hostname = hostname
self.ip = ip
self.port = port
self.user = user
self.password = password
for switch in equipamentos:
try:
USER = os.environ.get('USER')
PASS = os.environ.get('PASS')
PORT_TELNET = os.environ.get('PORT_TELNET')
print(f"Iniciando Backup no Switch {switch['hostname']}")
equipamento = Equipamento(switch['hostname'],switch['ip'],PORT_TELNET,USER,PASS)
main(equipamento)
except:
pass
| 35.722581
| 106
| 0.641683
| 677
| 5,537
| 5.155096
| 0.189069
| 0.19255
| 0.114613
| 0.068195
| 0.534384
| 0.475358
| 0.475358
| 0.462751
| 0.427794
| 0.427794
| 0
| 0.007696
| 0.225573
| 5,537
| 155
| 107
| 35.722581
| 0.806203
| 0.01806
| 0
| 0.283019
| 0
| 0
| 0.184763
| 0.009569
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0.09434
| 0.066038
| 0
| 0.09434
| 0.028302
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
3229bb9f7088946e3efcc3fcbb6cba8d90bd5930
| 4,329
|
py
|
Python
|
models/show.py
|
wanderindev/fyyur
|
acf3a44ce7fae6b24576a320afd447c0595d76e5
|
[
"MIT"
] | null | null | null |
models/show.py
|
wanderindev/fyyur
|
acf3a44ce7fae6b24576a320afd447c0595d76e5
|
[
"MIT"
] | null | null | null |
models/show.py
|
wanderindev/fyyur
|
acf3a44ce7fae6b24576a320afd447c0595d76e5
|
[
"MIT"
] | 2
|
2020-07-16T22:02:13.000Z
|
2020-11-22T21:16:28.000Z
|
from datetime import datetime
from sqlalchemy import or_
from app import db
from .mixin import ModelMixin
class Show(db.Model, ModelMixin):
__tablename__ = "shows"
id = db.Column(db.Integer, primary_key=True)
start_time = db.Column(db.DateTime, nullable=False)
artist_id = db.Column(
db.Integer, db.ForeignKey("artists.id"), nullable=False
)
venue_id = db.Column(
db.Integer, db.ForeignKey("venues.id"), nullable=False
)
def __init__(self, **kwargs):
super(Show, self).__init__(**kwargs)
@classmethod
def upcoming_shows_by_venue(cls, _venue_id):
shows = cls.query.filter(
cls.venue_id == _venue_id, Show.start_time > datetime.now()
).all()
return [
{
"artist_id": show.artist.id,
"artist_name": show.artist.name,
"artist_image_link": show.artist.image_link,
"start_time": show.start_time.isoformat(),
}
for show in shows
]
@classmethod
def past_shows_by_venue(cls, _venue_id):
shows = cls.query.filter(
cls.venue_id == _venue_id, Show.start_time < datetime.now()
).all()
return [
{
"artist_id": show.artist.id,
"artist_name": show.artist.name,
"artist_image_link": show.artist.image_link,
"start_time": show.start_time.isoformat(),
}
for show in shows
]
@classmethod
def upcoming_shows_by_artist(cls, _artist_id):
shows = cls.query.filter(
cls.artist_id == _artist_id, Show.start_time > datetime.now()
).all()
return [
{
"venue_id": show.venue.id,
"venue_name": show.venue.name,
"venue_image_link": show.venue.image_link,
"start_time": show.start_time.isoformat(),
}
for show in shows
]
@classmethod
def past_shows_by_artist(cls, _artist_id):
shows = cls.query.filter(
cls.artist_id == _artist_id, Show.start_time < datetime.now()
).all()
return [
{
"venue_id": show.venue.id,
"venue_name": show.venue.name,
"venue_image_link": show.venue.image_link,
"start_time": show.start_time.isoformat(),
}
for show in shows
]
@classmethod
def get_by_id(cls, _id):
return cls.query.filter_by(id=_id).first()
@classmethod
def get_show(cls, _id):
show = cls.get_by_id(_id)
return {
"venue_id": show.venue.id,
"venue_name": show.venue.name,
"artist_id": show.artist.id,
"artist_name": show.artist.name,
"artist_image_link": show.artist.image_link,
"start_time": show.start_time.isoformat(),
}
@classmethod
def get_shows(cls):
return [
{
"venue_id": show.venue.id,
"venue_name": show.venue.name,
"artist_id": show.artist.id,
"artist_name": show.artist.name,
"artist_image_link": show.artist.image_link,
"start_time": show.start_time.isoformat(),
}
for show in cls.query.all()
]
@classmethod
def search(cls, search_term):
from .artist import Artist
from .venue import Venue
shows = (
cls.query.join(Venue)
.join(Artist)
.filter(
or_(
Venue.name.ilike(f"%{search_term}%"),
Artist.name.ilike(f"%{search_term}%"),
)
)
.all()
)
return {
"data": [
{
"id": show.id,
"venue_name": show.venue.name,
"artist_name": show.artist.name,
"start_time": show.start_time,
}
for show in shows
],
"count": len(shows),
}
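# Illustrative usage sketch (not part of the original file): inside the Flask
# views these classmethods would typically back the venue, artist and search
# pages, e.g.:
#
#     upcoming = Show.upcoming_shows_by_venue(venue_id)
#     past = Show.past_shows_by_venue(venue_id)
#     matches = Show.search('musical hop')
#
# ``venue_id`` is a hypothetical integer primary key of a Venue row.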
| 30.921429
| 74
| 0.495033
| 449
| 4,329
| 4.510022
| 0.13363
| 0.084444
| 0.070617
| 0.062222
| 0.719012
| 0.655309
| 0.655309
| 0.609877
| 0.609877
| 0.609877
| 0
| 0
| 0.396396
| 4,329
| 139
| 75
| 31.143885
| 0.774971
| 0
| 0
| 0.460317
| 0
| 0
| 0.097375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.047619
| 0.015873
| 0.230159
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
322bb4e6bc6b91b44404b73d00ac6be4830c39c7
| 658
|
py
|
Python
|
01_Hello_PGP/solution.py
|
3-24/id0-rsa.pub
|
633e974a330d0dc09d37e423168974b7fba69830
|
[
"MIT"
] | 1
|
2020-03-29T16:10:54.000Z
|
2020-03-29T16:10:54.000Z
|
01_Hello_PGP/solution.py
|
3-24/id0-rsa.pub
|
633e974a330d0dc09d37e423168974b7fba69830
|
[
"MIT"
] | null | null | null |
01_Hello_PGP/solution.py
|
3-24/id0-rsa.pub
|
633e974a330d0dc09d37e423168974b7fba69830
|
[
"MIT"
] | null | null | null |
from subprocess import run, PIPE
def check(password,filedata):
print("Trying passphrase={:s}".format(password))
cmd = run("gpg --pinentry-mode loopback --passphrase '{:s}' -d {:s}".format(password,filedata), shell=True, stdout=PIPE)
if cmd.returncode == 0:
output = cmd.stdout.decode('utf-8')
print('plaintext:')
print(output)
return True
else:
return False
def main():
f = open('/usr/share/dict/words','r')
lines = f.readlines()
for word in lines:
if "'" in word:
continue
word = word.strip()
if check(word,'message.txt'):
break
main()
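# Illustrative sketch (not part of the original file): an alternative to the
# shell-interpolated command in check() is passing an argument list, which
# sidesteps quoting problems with unusual passphrases. The flags mirror the
# gpg invocation already used above.
#
#     cmd = run(['gpg', '--pinentry-mode', 'loopback',
#                '--passphrase', password, '-d', filedata],
#               stdout=PIPE)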
| 24.37037
| 124
| 0.575988
| 81
| 658
| 4.679012
| 0.641975
| 0.084433
| 0.079156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004175
| 0.272036
| 658
| 26
| 125
| 25.307692
| 0.787056
| 0
| 0
| 0
| 0
| 0
| 0.193009
| 0.031915
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0.142857
| 0.047619
| 0
| 0.238095
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
32426c09b1bd20f92239fee3f6494dab7ae72789
| 2,477
|
py
|
Python
|
BASS_2_OM_testOnSyntheticData.py
|
oliviermirat/BASS
|
fe595fdc60795b09bb6c264b6da914a6e8e0c415
|
[
"MIT"
] | 1
|
2020-10-10T11:20:32.000Z
|
2020-10-10T11:20:32.000Z
|
BASS_2_OM_testOnSyntheticData.py
|
oliviermirat/BASS
|
fe595fdc60795b09bb6c264b6da914a6e8e0c415
|
[
"MIT"
] | null | null | null |
BASS_2_OM_testOnSyntheticData.py
|
oliviermirat/BASS
|
fe595fdc60795b09bb6c264b6da914a6e8e0c415
|
[
"MIT"
] | null | null | null |
import sys
sys.path.insert(1, './GR_BASS/BASS_only_original/')
sys.path.insert(1, './GR_BASS/')
import bass as md
import numpy as np
import sys
import bassLibrary as bl
# BASS algorithm parameters
eps = 0.1
p_d = 0.2
Jthr = 0.15
seed = 0
# Creating synthetic data to run BASS on and to learn the GMM model
nbClasses = 5
classNames = ['a', 'b', 'c', 'd', 'e']
nbInstDataAnalyze = 4000
probElemDictAppear = 0.05
[dataToAnalyze1, dataForLearn] = bl.createSyntheticDataSet(nbClasses, nbInstDataAnalyze, [[3, 2, 1, 0], [0, 1, 2, 3]], [probElemDictAppear, probElemDictAppear])
l = int(len(dataToAnalyze1)/4)
lengths_data1 = np.array([l, l, l, l])
# Learning the model with the data previously created
model_fit = md.GMM_model(nbClasses)
model_fit.solve(dataForLearn)
# Launch BASS on the synthetic data previously created
posteriorProb1 = bl.getPosteriorProbabilities(dataToAnalyze1, lengths_data1, model_fit)
[P_w1, nbInstances1, w_dict1] = bl.launchBASS(posteriorProb1, lengths_data1, model_fit, eps, p_d, Jthr, seed)
[transmat_, stationary_probs_, a, b, c] = bl.launchMarkovianCompare(posteriorProb1, lengths_data1, model_fit, eps, p_d, Jthr, seed, w_dict1, classNames, 0, {'nameOfFile' : 'syntheticDataTest'})
# Comparing different dataset with different amounts of insertions
for idx, probElemDictAppear2 in enumerate([0.1, 0.05]):
print("Comparing two different dataset with SAME amounts of insertions. Probability: ", probElemDictAppear2)
[dataToAnalyze2, dataForLearn2] = bl.createSyntheticDataSet(nbClasses, nbInstDataAnalyze, [[3, 2, 1, 0], [0, 1, 2, 3]], [probElemDictAppear2, probElemDictAppear2])
l = int(len(dataToAnalyze2)/4)
lengths_data2 = np.array([l, l, l, l])
posteriorProb2 = bl.getPosteriorProbabilities(dataToAnalyze2, lengths_data2, model_fit)
[P_w2, nbInstances2, w_dict2] = bl.launchBASS(posteriorProb2, lengths_data2, model_fit, eps, p_d, Jthr, seed)
w_thr = 1e-4
p_ins = 0.2
mu = 1.0
H_beta_fac = 0
Sigma = dataToAnalyze1.shape[1]
std = 0.05
params = np.array([eps,p_d,p_ins, mu, w_thr,H_beta_fac, Jthr, Sigma, std], dtype =float)
bl.compareTwoBASSresults(w_dict1, w_dict2, params, model_fit, dataToAnalyze1, lengths_data1, dataToAnalyze2, lengths_data2, {'nameOfFile' : 'syntheticDataTest'}, classNames, str(idx)) # TODO: change compareTwoBASSresults for it to accept the posterior probabilities posteriorProb1 and posteriorProb2 instead of the data dataToAnalyze1 and dataToAnalyze2
| 43.45614
| 355
| 0.749697
| 340
| 2,477
| 5.329412
| 0.358824
| 0.03532
| 0.006623
| 0.033113
| 0.162804
| 0.162804
| 0.128587
| 0.128587
| 0.115894
| 0.115894
| 0
| 0.044007
| 0.137667
| 2,477
| 56
| 356
| 44.232143
| 0.804307
| 0.175212
| 0
| 0.052632
| 0
| 0
| 0.086486
| 0.014251
| 0
| 0
| 0
| 0.017857
| 0
| 1
| 0
| false
| 0
| 0.131579
| 0
| 0.131579
| 0.026316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3248e7edee7a47a71c97765cef8dd8859b78769c
| 3,698
|
py
|
Python
|
test/test_grid_to_triple.py
|
NCAR/geocat-f2py
|
fee07e680f61ca2ebfbb33f1554d9d85271fa32a
|
[
"Apache-2.0"
] | 4
|
2021-02-20T20:02:11.000Z
|
2021-11-24T13:35:32.000Z
|
test/test_grid_to_triple.py
|
NCAR/geocat-f2py
|
fee07e680f61ca2ebfbb33f1554d9d85271fa32a
|
[
"Apache-2.0"
] | 27
|
2020-12-07T17:00:05.000Z
|
2022-03-24T16:42:17.000Z
|
test/test_grid_to_triple.py
|
NCAR/geocat-f2py
|
fee07e680f61ca2ebfbb33f1554d9d85271fa32a
|
[
"Apache-2.0"
] | 4
|
2021-01-07T01:50:11.000Z
|
2021-07-07T13:05:42.000Z
|
import sys
import unittest as ut
import numpy as np
import xarray as xr
# Import from directory structure if coverage test, or from installed
# packages otherwise
if "--cov" in str(sys.argv):
from src.geocat.f2py import grid_to_triple
else:
from geocat.f2py import grid_to_triple
# Size of the grids
ny = 2
mx = 3
# Nominal input
data = np.asarray([2.740655, 2.745848, 4.893587, 2.965059, 1.707929,
0.746007]).reshape((ny, mx))
# Missing value = np.nan input
data_nan = data.copy()
data_nan[0, 1] = np.nan
data_nan[1, 2] = np.nan
# Missing value = -99 input
data_msg = data_nan.copy()
data_msg[np.isnan(data_msg)] = -99
# Coordinates
x = np.asarray([1.0, 3.0, 5.0])
y = np.asarray([2.0, 4.0])
# Expected output
out_expected = np.asarray([1, 3, 5, 1, 3, 5, 2, 2, 2, 4, 4, 4, 2.740655, 2.745848, 4.893587, 2.965059, 1.707929, 0.746007])\
.reshape((3, ny * mx))
out_expected_msg = np.asarray([1, 5, 1, 3, 2, 2, 4, 4, 2.740655, 4.893587, 2.965059, 1.707929])\
.reshape((3, 4))
class Test_grid_to_triple_float64(ut.TestCase):
def test_grid_to_triple_float64(self):
out = grid_to_triple(data, x, y)
np.testing.assert_array_equal(out_expected, out.values)
def test_grid_to_triple_float64_xr(self):
data_xr = xr.DataArray(
data,
coords={
'lat': y,
'lon': x,
},
dims=['lat', 'lon'],
)
out = grid_to_triple(data_xr, x, y)
np.testing.assert_array_equal(out_expected, out.values)
def test_grid_to_triple_float64_xr_x_y(self):
data_xr = xr.DataArray(data)
out = grid_to_triple(data_xr, x, y)
np.testing.assert_array_equal(out_expected, out.values)
def test_grid_to_triple_float64_nan(self):
out = grid_to_triple(data_nan, x, y)
np.testing.assert_array_equal(out_expected_msg, out.values)
def test_grid_to_triple_float64_nan_2(self):
out = grid_to_triple(data_nan, x, y, msg_py=np.nan)
np.testing.assert_array_equal(out_expected_msg, out.values)
def test_grid_to_triple_float64_msg(self):
out = grid_to_triple(data_msg, x, y, msg_py=-99)
np.testing.assert_array_equal(out_expected_msg, out.values)
class Test_grid_to_triple_float32(ut.TestCase):
def test_grid_to_triple_float32(self):
data_asfloat32 = data.astype(np.float32)
out = grid_to_triple(data_asfloat32, x.astype(np.float32),
y.astype(np.float32))
np.testing.assert_array_equal(out_expected.astype(np.float32), out)
def test_grid_to_triple_float32_nan(self):
data_asfloat32_nan = data_nan.astype(np.float32)
out = grid_to_triple(data_asfloat32_nan, x.astype(np.float32),
y.astype(np.float32))
np.testing.assert_array_equal(out_expected_msg.astype(np.float32), out)
def test_grid_to_triple_float32_nan_2(self):
data_asfloat32_nan = data_nan.astype(np.float32)
out = grid_to_triple(data_asfloat32_nan,
x.astype(np.float32),
y.astype(np.float32),
msg_py=np.nan)
np.testing.assert_array_equal(out_expected_msg.astype(np.float32), out)
def test_grid_to_triple_float32_msg(self):
data_asfloat32_msg = data_msg.astype(np.float32)
out = grid_to_triple(data_asfloat32_msg,
x.astype(np.float32),
y.astype(np.float32),
msg_py=-99)
np.testing.assert_array_equal(out_expected_msg.astype(np.float32), out)
| 32.156522
| 124
| 0.635479
| 558
| 3,698
| 3.928315
| 0.15233
| 0.065693
| 0.131387
| 0.087591
| 0.742701
| 0.715785
| 0.630474
| 0.604015
| 0.604015
| 0.536496
| 0
| 0.085022
| 0.252569
| 3,698
| 114
| 125
| 32.438596
| 0.708032
| 0.054354
| 0
| 0.25
| 0
| 0
| 0.004874
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 1
| 0.131579
| false
| 0
| 0.078947
| 0
| 0.236842
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3255418e552bf21eec558aa0897845fa6583a29c
| 4,984
|
py
|
Python
|
u3s2m1ass1-pt6/code/rpg_queries.py
|
LambdaTheda/lambdata-Unit3
|
b44b20f2f3e28d2b17613660ddb562afe4825686
|
[
"MIT"
] | null | null | null |
u3s2m1ass1-pt6/code/rpg_queries.py
|
LambdaTheda/lambdata-Unit3
|
b44b20f2f3e28d2b17613660ddb562afe4825686
|
[
"MIT"
] | null | null | null |
u3s2m1ass1-pt6/code/rpg_queries.py
|
LambdaTheda/lambdata-Unit3
|
b44b20f2f3e28d2b17613660ddb562afe4825686
|
[
"MIT"
] | 1
|
2020-05-11T04:33:24.000Z
|
2020-05-11T04:33:24.000Z
|
import sqlite3
import os
#DB_FILEPATH = "data/chinook.db"
DB_FILEPATH = os.path.join(os.path.dirname(__file__), "..", "data", "rpg_db.sqlite3")
conn = sqlite3.connect(DB_FILEPATH)
conn.row_factory = sqlite3.Row
print(type(conn)) #> <class 'sqlite3.Connection'>
curs = conn.cursor()
print(type(curs)) #> <class 'sqlite3.Cursor'>
query = """SELECT
count(DISTINCT character_id) as character_count
FROM charactercreator_character"""
# query1 = """SELECT
# count(DISTINCT character_ptr_id) as character_ptr_count
# FROM charactercreator_cleric"""
# query2 = """SELECT
# count(DISTINCT character_ptr_id) as character_ptr_count
# FROM charactercreator_fighter"""
# query3 = """SELECT
# count(DISTINCT character_ptr_id) as character_ptr_count
# FROM charactercreator_mage"""
# query4 = """SELECT
# count(DISTINCT character_ptr_id) as character_ptr_count
# FROM charactercreator_thief"""
queries_combined = """SELECT
count(distinct c.character_ptr_id) as total_clerics
,count(distinct f.character_ptr_id) as total_fighters
,count(distinct m.character_ptr_id) as total_mages
,count(distinct n.mage_ptr_id) as total_necromancers
,count(distinct t.character_ptr_id) as total_thieves
FROM charactercreator_character ccc
LEFT JOIN charactercreator_fighter f
ON ccc.character_id = f.character_ptr_id
LEFT JOIN charactercreator_cleric c
ON ccc.character_id= c.character_ptr_id
LEFT JOIN charactercreator_mage m
ON ccc.character_id = m.character_ptr_id
LEFT JOIN charactercreator_necromancer n
ON ccc.character_id = n.mage_ptr_id
LEFT JOIN charactercreator_thief t
ON ccc.character_id = t.character_ptr_id"""
query5 = """SELECT
count(DISTINCT item_id ) as total_item
FROM armory_item"""
query6 = """SELECT
count(DISTINCT item_ptr_id) as weapons
FROM armory_weapon"""
query7 = """SELECT
count(DISTINCT item_id) - count(DISTINCT item_ptr_id) as total_non_weapons
FROM armory_item, armory_weapon"""
query8 = """SELECT item_id
, count(DISTINCT item_id) as item
FROM charactercreator_character_inventory
GROUP BY character_id
LIMIT 20
"""
query9 = """SELECT cci.character_id
, count(DISTINCT aw.item_ptr_id) as number_of_weapons
FROM charactercreator_character_inventory as cci
LEFT JOIN armory_item as ai ON cci.item_id = ai.item_id
LEFT JOIN armory_weapon as aw ON ai.item_id = aw.item_ptr_id
GROUP BY character_id
LIMIT 20"""
query10 = """SELECT avg(total_items) as avg_items
FROM (
-- row per character = 302
SELECT
c.character_id
,c.name
--,ci.item_id
,count(distinct ci.item_id) as total_items
FROM charactercreator_character c
LEFT JOIN charactercreator_character_inventory ci
ON c.character_id = ci.character_id
GROUP BY c.character_id
) subz"""
query11 = """SELECT avg(weapon_count) as avg_weapon
FROM (
SELECT
cci.character_id
,count(DISTINCT aw.item_ptr_id) as weapon_count
FROM charactercreator_character_inventory cci
LEFT JOIN armory_item ai ON cci.item_id = ai.item_id
LEFT JOIN armory_weapon aw ON ai.item_id = aw.item_ptr_id
GROUP BY 1
) subz"""
print("----------")
result = curs.execute(query).fetchone()
print("RESULTS FOR CHARACTERCREATOR_CHARACTER", result)
print(result["character_count"])
# print("-------------")
# result1 = curs.execute(query1).fetchone()
# print("Results for charactercreator_cleric", result1)
# print(result1["character_ptr_count"])
# print("---------")
# result2 = curs.execute(query2).fetchone()
# print("Results for charactercreator_fighter", result2)
# print(result2["character_ptr_count"])
# print("---------")
# result3 = curs.execute(query3).fetchone()
# print("Results for charactercreator_mage", result3)
# print(result3["character_ptr_count"])
# print('--------')
# result4 = curs.execute(query4).fetchone()
# print("Results for charactercreator_thief", result4)
# print(result4["character_ptr_count"])
# print("-------------")
# result5 = curs.execute(query5).fetchone()
# print("Results for total Items", result5)
# print(result5["total_item"])
result_queries = curs.execute(queries_combined).fetchall()
print("Results of each specific subclass", result_queries)
result6 = curs.execute(query6).fetchone()
print("Results for total weapons", result6)
print(result6["weapons"])
print("---------")
result7 = curs.execute(query7).fetchone()
print("Results for total non weapons", result7)
print(result7["total_non_weapons"])
print("---------")
result8 = curs.execute(query8).fetchall()
for rw in result8:
print(rw[0], rw[1])
print("---------")
result9 = curs.execute(query9).fetchall()
for rw in result9:
print(rw['character_id'], rw['number_of_weapons'])
print("---------")
result10 = curs.execute(query10).fetchone()
print("Average item per character", result10)
print(result10["avg_items"])
print("---------")
result11= curs.execute(query11).fetchone()
print("Average weapon per character", result11)
print(result11["avg_weapon"])
print("---------")
| 30.576687
| 85
| 0.731742
| 672
| 4,984
| 5.197917
| 0.165179
| 0.068709
| 0.026052
| 0.036645
| 0.354996
| 0.208131
| 0.149442
| 0.149442
| 0.149442
| 0.149442
| 0
| 0.018266
| 0.132223
| 4,984
| 163
| 86
| 30.576687
| 0.789364
| 0.254013
| 0
| 0.135922
| 0
| 0
| 0.685396
| 0.099349
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019417
| 0
| 0.019417
| 0.213592
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
326bc9a28ede548053a0104238484ec204f3ccb0
| 1,518
|
py
|
Python
|
macdaily/cmd/install.py
|
JarryShaw/MacDaily
|
853b841dd1f1f7e6aae7bf2c305ff008bc76055c
|
[
"BSD-3-Clause"
] | 10
|
2018-09-20T19:57:56.000Z
|
2021-11-14T18:28:10.000Z
|
macdaily/cmd/install.py
|
JarryShaw/jsdaily
|
3ca7aa7c75a12dc08ab44f78af2b089e1ed41d3d
|
[
"BSD-3-Clause"
] | 2
|
2020-05-31T08:49:47.000Z
|
2021-12-28T16:57:42.000Z
|
macdaily/cmd/install.py
|
JarryShaw/jsdaily
|
3ca7aa7c75a12dc08ab44f78af2b089e1ed41d3d
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import abc
from macdaily.cls.command import Command
from macdaily.util.tools.print import print_info
class InstallCommand(Command):
@property
def cmd(self):
return 'install'
@property
def act(self):
return ('install', 'installed', 'installed')
@property
def job(self):
return ('installation', 'installation')
@property
def ignored(self):
return NotImplemented
@property
def notfound(self):
return NotImplemented
def _pkg_args(self, namespace):
"""Return if there's packages for main process."""
self._merge_packages(namespace)
self._parse_args(namespace)
self._pkgs = list()
self._fail = list()
return bool(self._packages)
def _run_proc(self):
self._pkgs = list()
self._fail = list()
for path in self._exec:
text = f'Using {self.name} executable {path!r}'
print_info(text, self._file, redirect=self._qflag)
self._var__temp_pkgs = self._packages # pylint: disable=attribute-defined-outside-init
if self._check_confirm(path):
self._proc_install(path)
else:
text = f'No {self.desc[1]} to install for executable {path!r}'
print_info(text, self._file, redirect=self._qflag)
self._proc_fixmissing(path)
self._proc_cleanup()
@abc.abstractmethod
def _proc_install(self, path):
pass
| 25.3
| 99
| 0.607378
| 174
| 1,518
| 5.091954
| 0.442529
| 0.062077
| 0.038375
| 0.036117
| 0.182844
| 0.182844
| 0.128668
| 0.128668
| 0.128668
| 0.128668
| 0
| 0.001847
| 0.286561
| 1,518
| 59
| 100
| 25.728814
| 0.816251
| 0.075099
| 0
| 0.309524
| 0
| 0
| 0.103794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.190476
| false
| 0.02381
| 0.071429
| 0.119048
| 0.428571
| 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
32839d586b1955e1c6b167959e736b233c1def5e
| 363
|
py
|
Python
|
vandal/objects/__init__.py
|
vandal-dev/vandal
|
1981c86f4de6632776a4132ecbc206fac5188f32
|
[
"Apache-2.0"
] | 1
|
2022-02-22T18:39:57.000Z
|
2022-02-22T18:39:57.000Z
|
vandal/objects/__init__.py
|
vandal-dev/vandal
|
1981c86f4de6632776a4132ecbc206fac5188f32
|
[
"Apache-2.0"
] | null | null | null |
vandal/objects/__init__.py
|
vandal-dev/vandal
|
1981c86f4de6632776a4132ecbc206fac5188f32
|
[
"Apache-2.0"
] | null | null | null |
# import all relevant contents from the associated module.
from vandal.objects.montecarlo import (
MonteCarlo,
MCapp,
)
from vandal.objects.eoq import(
EOQ,
EOQapp,
)
from vandal.objects.dijkstra import Dijkstra
# all relevant contents.
__all__ = [
'MonteCarlo',
'EOQ',
'Dijkstra',
'MCapp',
'EOQapp',
]
| 16.5
| 59
| 0.628099
| 37
| 363
| 6.054054
| 0.405405
| 0.133929
| 0.227679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 363
| 21
| 60
| 17.285714
| 0.848485
| 0.217631
| 0
| 0
| 0
| 0
| 0.123077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1875
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
32885105782d33bbebe4c4cc904fbc2149735713
| 784
|
py
|
Python
|
app/live/tests.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | 2
|
2017-12-02T13:58:30.000Z
|
2018-08-02T17:07:59.000Z
|
app/live/tests.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | null | null | null |
app/live/tests.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | null | null | null |
import os
import unittest
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.flatpages.models import FlatPage
from django.test import TestCase
from django.test.utils import override_settings
class HeyDoAppTest(unittest.TestCase):
def createUser(self):
is_new, user = User.create_user(
openid="1234567890",
source=1,
nick="username",
gender=1,
ip=self.request.remote_ip,
province="",
city="",
country="",
headimgurl="",
)
#success,message = QCloudIM.account_import(user)
return is_new, user
if __name__ == '__main__':
unittest.main()
| 27.034483
| 60
| 0.632653
| 86
| 784
| 5.604651
| 0.569767
| 0.124481
| 0.105809
| 0.087137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02109
| 0.274235
| 784
| 28
| 61
| 28
| 0.826011
| 0.059949
| 0
| 0
| 0
| 0
| 0.035326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.304348
| 0
| 0.434783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
3291b0fa03bb75af83a902f66fc3f91285f8e9a3
| 9,147
|
py
|
Python
|
TM1py/Services/GitService.py
|
adscheevel/tm1py
|
8a53c7a63e3c0e2c6198c2cd0c2f57d10a7cfe43
|
[
"MIT"
] | 113
|
2019-03-12T19:42:39.000Z
|
2022-03-31T22:40:05.000Z
|
TM1py/Services/GitService.py
|
adscheevel/tm1py
|
8a53c7a63e3c0e2c6198c2cd0c2f57d10a7cfe43
|
[
"MIT"
] | 459
|
2019-01-25T09:32:18.000Z
|
2022-03-24T21:57:16.000Z
|
TM1py/Services/GitService.py
|
adscheevel/tm1py
|
8a53c7a63e3c0e2c6198c2cd0c2f57d10a7cfe43
|
[
"MIT"
] | 107
|
2019-01-31T15:08:34.000Z
|
2022-03-16T14:58:38.000Z
|
# -*- coding: utf-8 -*-
import json
from typing import List
from TM1py.Objects.Git import Git
from TM1py.Objects.GitCommit import GitCommit
from TM1py.Objects.GitPlan import GitPushPlan, GitPullPlan, GitPlan
from TM1py.Services.ObjectService import ObjectService
from TM1py.Services.RestService import RestService, Response
from TM1py.Utils.Utils import format_url
class GitService(ObjectService):
""" Service to interact with GIT
"""
COMMON_PARAMETERS = {'username': 'Username', 'password': 'Password', 'message': 'Message', 'author': 'Author',
'email': 'Email', 'branch': 'Branch', 'new_branch': 'NewBranch', 'force': 'Force',
'public_key': 'PublicKey', 'private_key': 'PrivateKey', 'passphrase': 'Passphrase',
'config': 'Config'}
def __init__(self, rest: RestService):
super().__init__(rest)
def git_init(self, git_url: str, deployment: str, username: str = None, password: str = None,
public_key: str = None, private_key: str = None, passphrase: str = None, force: bool = None,
config: dict = None, **kwargs) -> Git:
""" Initialize GIT service, returns Git object
:param git_url: file or http(s) path to GIT repository
:param deployment: name of selected deployment group
:param username: GIT username
:param password: GIT password
:param public_key: SSH public key, available from PAA V2.0.9.4
:param private_key: SSH private key, available from PAA V2.0.9.4
:param passphrase: Passphrase for decrypting private key, if set
:param force: reset git context on True
:param config: Dictionary containing git configuration parameters
"""
url = "/api/v1/GitInit"
body = {'URL': git_url, 'Deployment': deployment}
for key, value in locals().items():
if value is not None and key in self.COMMON_PARAMETERS.keys():
body[self.COMMON_PARAMETERS.get(key)] = value
body_json = json.dumps(body)
response = self._rest.POST(url=url, data=body_json, **kwargs)
return Git.from_dict(response.json())
def git_uninit(self, force: bool = False, **kwargs):
""" Unitialize GIT service
:param force: clean up git context when True
"""
url = "/api/v1/GitUninit"
body = json.dumps(force)
return self._rest.POST(url=url, data=body, **kwargs)
def git_status(self, username: str = None, password: str = None, public_key: str = None, private_key: str = None,
passphrase: str = None, **kwargs) -> Git:
""" Get GIT status, returns Git object
:param username: GIT username
:param password: GIT password
:param public_key: SSH public key, available from PAA V2.0.9.4
:param private_key: SSH private key, available from PAA V2.0.9.4
:param passphrase: Passphrase for decrypting private key, if set
"""
url = "/api/v1/GitStatus"
body = {}
for key, value in locals().items():
if value is not None and key in self.COMMON_PARAMETERS.keys():
body[self.COMMON_PARAMETERS.get(key)] = value
response = self._rest.POST(url=url, data=json.dumps(body), **kwargs)
return Git.from_dict(response.json())
def git_push(self, message: str, author: str, email: str, branch: str = None, new_branch: str = None,
force: bool = False, username: str = None, password: str = None, public_key: str = None,
private_key: str = None, passphrase: str = None, execute: bool = None, **kwargs) -> Response:
""" Creates a gitpush plan, returns response
:param message: Commit message
:param author: Name of commit author
:param email: Email of commit author
:param branch: The branch which last commit will be used as parent commit for new branch.
Must be empty if GIT repo is empty
:param new_branch: If specified, creates a new branch and pushes the commit onto it. If not specified,
pushes to the branch specified in "Branch"
:param force: A flag passed in for evaluating preconditions
:param username: GIT username
:param password: GIT password
:param public_key: SSH public key, available from PAA V2.0.9.4
:param private_key: SSH private key, available from PAA V2.0.9.4
:param passphrase: Passphrase for decrypting private key, if set
:param execute: Executes the plan right away if True
"""
url = "/api/v1/GitPush"
body = {}
for key, value in locals().items():
if value is not None and key in self.COMMON_PARAMETERS.keys():
body[self.COMMON_PARAMETERS.get(key)] = value
response = self._rest.POST(url=url, data=json.dumps(body), **kwargs)
if execute:
plan_id = json.loads(response.content).get('ID')
self.git_execute_plan(plan_id=plan_id)
return response
def git_pull(self, branch: str, force: bool = None, execute: bool = None, username: str = None,
password: str = None, public_key: str = None, private_key: str = None, passphrase: str = None,
**kwargs) -> Response:
""" Creates a gitpull plan, returns response
:param branch: The name of source branch
:param force: A flag passed in for evaluating preconditions
:param execute: Executes the plan right away if True
:param username: GIT username
:param password: GIT password
:param public_key: SSH public key, available from PAA V2.0.9.4
:param private_key: SSH private key, available from PAA V2.0.9.4
:param passphrase: Passphrase for decrypting private key, if set
"""
url = "/api/v1/GitPull"
body = {}
for key, value in locals().items():
if value is not None and key in self.COMMON_PARAMETERS.keys():
body[self.COMMON_PARAMETERS.get(key)] = value
body_json = json.dumps(body)
response = self._rest.POST(url=url, data=body_json, **kwargs)
if execute:
plan_id = json.loads(response.content).get('ID')
self.git_execute_plan(plan_id=plan_id)
return response
def git_execute_plan(self, plan_id: str, **kwargs) -> Response:
""" Executes a plan based on the planid
:param plan_id: GitPlan id
"""
url = format_url("/api/v1/GitPlans('{}')/tm1.Execute", plan_id)
return self._rest.POST(url=url, **kwargs)
def git_get_plans(self, **kwargs) -> List[GitPlan]:
""" Gets a list of currently available GIT plans
"""
url = "/api/v1/GitPlans"
plans = []
response = self._rest.GET(url=url, **kwargs)
# Every individual plan is wrapped in a "value" parent, iterate through those to get the actual plans
for plan in response.json().get('value'):
plan_id = plan.get('ID')
# Check if plan has an ID, sometimes there's a null in the mix that we don't want
if plan_id is None:
continue
plan_branch = plan.get('Branch')
plan_force = plan.get('Force')
# A git plan can either be a PushPlan or a PullPlan, these have slightly different variables,
# so we need to handle those differently
if plan.get('@odata.type') == '#ibm.tm1.api.v1.GitPushPlan':
plan_new_branch = plan.get('NewBranch')
plan_source_files = plan.get('SourceFiles')
new_commit = GitCommit(
commit_id=plan.get('NewCommit').get('ID'),
summary=plan.get('NewCommit').get('Summary'),
author=plan.get('NewCommit').get('Author'))
parent_commit = GitCommit(
commit_id=plan.get('ParentCommit').get('ID'),
summary=plan.get('ParentCommit').get('Summary'),
author=plan.get('ParentCommit').get('Author'))
current_plan = GitPushPlan(
plan_id=plan_id, branch=plan_branch, force=plan_force,
new_branch=plan_new_branch, new_commit=new_commit,
parent_commit=parent_commit, source_files=plan_source_files)
elif plan.get('@odata.type') == '#ibm.tm1.api.v1.GitPullPlan':
plan_commit = GitCommit(
commit_id=plan.get('Commit').get('ID'),
summary=plan.get('Commit').get('Summary'),
author=plan.get('Commit').get('Author'))
plan_operations = plan.get('Operations')
current_plan = GitPullPlan(plan_id=plan_id, branch=plan_branch, force=plan_force, commit=plan_commit,
operations=plan_operations)
else:
raise RuntimeError(f"Invalid plan detected: {plan.get('@odata.type')}")
plans.append(current_plan)
return plans
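A minimal usage sketch for the GitService defined above, assuming an already-configured TM1py RestService instance named `rest`; the repository URL, credentials and author details are hypothetical placeholders.
from TM1py.Services.GitService import GitService

git_service = GitService(rest)  # `rest`: a configured RestService, created elsewhere

# Initialise the git context and inspect its status
git = git_service.git_init(git_url="https://example.com/models.git", deployment="dev",
                           username="user", password="secret")
status = git_service.git_status(username="user", password="secret")

# Create a push plan and execute it right away
response = git_service.git_push(message="sync TI processes", author="Jane Doe",
                                email="jane@example.com", branch="main", execute=True)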
| 44.619512
| 117
| 0.608396
| 1,149
| 9,147
| 4.742385
| 0.181027
| 0.028262
| 0.014682
| 0.027895
| 0.49789
| 0.467058
| 0.445036
| 0.440264
| 0.430354
| 0.403927
| 0
| 0.007804
| 0.285558
| 9,147
| 204
| 118
| 44.838235
| 0.826014
| 0.282278
| 0
| 0.292453
| 0
| 0
| 0.101576
| 0.018365
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075472
| false
| 0.084906
| 0.075472
| 0
| 0.235849
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
32b9a1053b526032d5d6c19f20fe7c9cbc1b1859
| 5,299
|
py
|
Python
|
social_network/utils.py
|
diana-gv/django-social-network
|
48bafca81f28874ceead59e263ce5b7e3853dbfb
|
[
"BSD-3-Clause"
] | 3
|
2015-01-13T05:45:04.000Z
|
2020-01-10T19:05:35.000Z
|
social_network/utils.py
|
diana-gv/django-social-network
|
48bafca81f28874ceead59e263ce5b7e3853dbfb
|
[
"BSD-3-Clause"
] | null | null | null |
social_network/utils.py
|
diana-gv/django-social-network
|
48bafca81f28874ceead59e263ce5b7e3853dbfb
|
[
"BSD-3-Clause"
] | 6
|
2015-01-13T04:40:53.000Z
|
2021-08-13T01:07:40.000Z
|
# coding=utf-8
import random
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from notifications.models import EventType
from social_graph import EdgeType
try:
from hashlib import sha1 as sha_constructor, md5 as md5_constructor
except ImportError:
pass
#---------------------NOTIFICATIONS---------------------------------
def group_comment_event_type():
comment_event_type = cache.get('SOCIAL_NETWORK_COMMENT_EVENT_TYPE')
if comment_event_type is not None:
return comment_event_type
try:
from . import SOCIAL_GROUP_COMMENT_EVENT_TYPE_NAME
comment_event_type = EventType.objects.get(name=SOCIAL_GROUP_COMMENT_EVENT_TYPE_NAME)
cache.set('SOCIAL_NETWORK_COMMENT_EVENT_TYPE', comment_event_type)
return comment_event_type
except ObjectDoesNotExist as e:
pass # TODO Log this
def group_shared_link_event_type():
shared_link = cache.get('SOCIAL_NETWORK_SHARED_LINK_EVENT_TYPE')
if shared_link is not None:
return shared_link
try:
from . import SOCIAL_GROUP_SHARED_LINK_EVENT_TYPE_NAME
shared_link = EventType.objects.get(name=SOCIAL_GROUP_SHARED_LINK_EVENT_TYPE_NAME)
cache.set('SOCIAL_NETWORK_SHARED_LINK_EVENT_TYPE', shared_link)
return shared_link
except ObjectDoesNotExist as e:
pass # TODO Log this
def group_photo_event_type():
photo_event_type = cache.get('SOCIAL_NETWORK_PHOTO_EVENT_TYPE')
if photo_event_type is not None:
return photo_event_type
try:
from . import SOCIAL_GROUP_PHOTO_EVENT_TYPE_NAME
photo_event_type = EventType.objects.get(name=SOCIAL_GROUP_PHOTO_EVENT_TYPE_NAME)
cache.set('SOCIAL_NETWORK_PHOTO_EVENT_TYPE', photo_event_type)
return photo_event_type
except ObjectDoesNotExist as e:
pass # TODO Log this
#---------------------EDGES-----------------------------------------
def friendship_edge():
_friendship = cache.get('FRIENDSHIP_EDGE_TYPE')
if _friendship is not None:
return _friendship
try:
_friendship = EdgeType.objects.get(name="Friendship")
cache.set('FRIENDSHIP_EDGE_TYPE', _friendship)
return _friendship
except ObjectDoesNotExist as e:
pass # TODO Log this
def integrated_by_edge():
_integrated_by = cache.get('INTEGRATED_BY_EDGE_TYPE')
if _integrated_by is not None:
return _integrated_by
try:
_integrated_by = EdgeType.objects.get(name="Integrated by")
cache.set('INTEGRATED_BY_EDGE_TYPE', _integrated_by)
return _integrated_by
except ObjectDoesNotExist as e:
pass # TODO Log this
def member_of_edge():
_member_of = cache.get('MEMBER_OF_EDGE_TYPE')
if _member_of is not None:
return _member_of
try:
_member_of = EdgeType.objects.get(name="Member")
cache.set('MEMBER_OF_EDGE_TYPE', _member_of)
return _member_of
except ObjectDoesNotExist as e:
pass # TODO Log this
def follower_of_edge():
_follower_of = cache.get('FOLLOWER_OF_EDGE_TYPE')
if _follower_of is not None:
return _follower_of
try:
_follower_of = EdgeType.objects.get(name="Follower")
cache.set('FOLLOWER_OF_EDGE_TYPE', _follower_of)
return _follower_of
except ObjectDoesNotExist:
pass
def followed_by_edge():
_followed_by = cache.get('FOLLOWED_BY_EDGE_TYPE')
if _followed_by is not None:
return _followed_by
try:
_followed_by = EdgeType.objects.get(name="Followed by")
cache.set('FOLLOWED_BY_EDGE_TYPE', _followed_by)
return _followed_by
except ObjectDoesNotExist:
pass
#---------------------GENERAL-----------------------------------------
def generate_sha1(string, salt=None):
"""
Generates a sha1 hash for supplied string. Doesn't need to be very secure
because it's not used for password checking. We got Django for that.
:param string:
The string that needs to be encrypted.
:param salt:
Optionally define your own salt. If none is supplied, will use a random
string of 5 characters.
:return: Tuple containing the salt and hash.
"""
if not isinstance(string, (str, unicode)):
string = str(string)
if isinstance(string, unicode):
string = string.encode("utf-8")
if not salt:
salt = sha_constructor(str(random.random())).hexdigest()[:5]
hash = sha_constructor(salt+string).hexdigest()
return (salt, hash)
# A tuple of standard large number to their converters
intword_converters = (
(3, lambda number: _('%(value)dK')),
(6, lambda number: _('%(value)dM')),
(9, lambda number: _('%(value)dG')),
)
def intmin(value):
"""
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
if value < 1000:
return value
for exponent, converter in intword_converters:
large_number = 10 ** exponent
if value < large_number * 1000:
new_value = value / large_number
tpl = "+%s" if value > large_number else "%s"
return tpl % converter(new_value) % {'value': new_value}
return value
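A small worked example of the two general helpers above (a sketch; it assumes the module's Django imports are satisfied and, as the use of `unicode` implies, Python 2 semantics, where `/` on ints is integer division).
salt, digest = generate_sha1("hello")   # salt: random 5-char string, digest: sha1 hex of salt + "hello"
print(intmin(850))    # 850   -> values below 1000 are returned unchanged
print(intmin(1500))   # '+1K' -> 1500 // 1000 == 1, prefixed with '+' because 1500 > 1000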
| 31.35503
| 93
| 0.670881
| 671
| 5,299
| 4.988078
| 0.196721
| 0.072602
| 0.052584
| 0.035853
| 0.35375
| 0.291903
| 0.18046
| 0.113833
| 0.088139
| 0.047804
| 0
| 0.00534
| 0.222495
| 5,299
| 169
| 94
| 31.35503
| 0.807039
| 0.13663
| 0
| 0.380165
| 0
| 0
| 0.111406
| 0.073533
| 0
| 0
| 0
| 0.005917
| 0
| 1
| 0.082645
| false
| 0.07438
| 0.090909
| 0
| 0.347107
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
08757365d19fb16259355c3f4a0bc7a45ccc8fde
| 1,808
|
py
|
Python
|
networkunit/models/backends/network_model.py
|
russelljjarvis/NetworkUnit
|
32179371d3a0ba354e6637cf4f97ba70522d4054
|
[
"BSD-3-Clause"
] | null | null | null |
networkunit/models/backends/network_model.py
|
russelljjarvis/NetworkUnit
|
32179371d3a0ba354e6637cf4f97ba70522d4054
|
[
"BSD-3-Clause"
] | 1
|
2019-11-15T22:56:20.000Z
|
2019-11-15T22:56:20.000Z
|
networkunit/models/backends/network_model.py
|
russelljjarvis/NetworkUnit
|
32179371d3a0ba354e6637cf4f97ba70522d4054
|
[
"BSD-3-Clause"
] | null | null | null |
"""NeuronUnit model class for reduced neuron models"""
import numpy as np
from neo.core import AnalogSignal
import quantities as pq
import neuronunit.capabilities as cap
import neuronunit.models as mod
import neuronunit.capabilities.spike_functions as sf
from neuronunit.models import backends
from generic_network import net_sim_runner, get_dummy_synapses
class NetworkModel(cap.ReceivesCurrent,
cap.ProducesMultiMembranePotentials,
cap.ProducesSpikeRasters,
):
"""Base class for network models
todo replace receives current with receives patterned input."""
def __init__(self, name=None, backend=None, synapses=None):  # backend: simulator backend module, e.g. pyNN.neuron
"""Instantiate a network model.
name: Optional model name.
"""
self.run_number = 0
self.backend = backend
self.tstop = None
self.data = None
self.vms = None
self.binary_trains = None
self.t_spike_axis = None
self.synapses = get_dummy_synapses()
try:
import generic_network  # needed for generic_network.sim below
self.sim = generic_network.sim
except Exception:
pass
def get_membrane_potentials(self):
return self.vms
def getSpikeRasters(self, **run_params):
return self.binary_trains
def inject_noise_current(self, stim_current, syn_weights):
import pyNN.neuron as sim
noisee = sim.NoisyCurrentSource(mean=0.74/1000.0, stdev=4.00/1000.0, start=0.0, stop=2000.0, dt=1.0)
noisei = sim.NoisyCurrentSource(mean=1.440/1000.0, stdev=4.00/1000.0, start=0.0, stop=2000.0, dt=1.0)
stim_noise_currents = [noisee,noisei]
self.data,self.vms,self.binary_trains,self.t_spike_axis = net_sim_runner(syn_weights,sim,self.synapses,stim_noise_currents)
return (self.vms, self.binary_trains, self.data)
| 35.45098
| 131
| 0.68031
| 238
| 1,808
| 5.016807
| 0.394958
| 0.033501
| 0.046901
| 0.023451
| 0.063652
| 0.063652
| 0.063652
| 0.063652
| 0.063652
| 0.063652
| 0
| 0.037518
| 0.233407
| 1,808
| 50
| 132
| 36.16
| 0.823954
| 0.107854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0
| 1
| 0.111111
| false
| 0.027778
| 0.25
| 0.055556
| 0.472222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
087e3e81767ebb79be98cf41ccb71262d3691e12
| 2,454
|
py
|
Python
|
jocular/calcs.py
|
MartinCooke/jocular
|
635816d4ef6aa6ea75187137e25386dad2d551e9
|
[
"MIT"
] | 6
|
2021-03-21T16:46:44.000Z
|
2021-11-27T14:07:06.000Z
|
jocular/calcs.py
|
MartinCooke/jocular
|
635816d4ef6aa6ea75187137e25386dad2d551e9
|
[
"MIT"
] | null | null | null |
jocular/calcs.py
|
MartinCooke/jocular
|
635816d4ef6aa6ea75187137e25386dad2d551e9
|
[
"MIT"
] | null | null | null |
''' Various astro calcs, mainly based on Meeus.
'''
import numpy as np
import math
import time
from datetime import datetime
def julian_date(when):
# from Meeus p 61; 'when' is a datetime object
y = when.year
m = when.month
d = when.day + when.hour/24 + when.minute/(24*60) + when.second/(24*3600)
if m < 3:
y -= 1
m += 12
a = int(y / 100)
# Gregorian correction applies to dates on/after 1582-10-15
# (January/February have already been shifted to months 13/14 of the previous year above)
if y > 1582 or (y == 1582 and (m > 10 or (m == 10 and d >= 15))):
# Gregorian
b = 2 - a + int(a / 4)
else:
# Julian
b = 0
jd = int(365.25 * (y + 4716)) + int(30.6001 * (m + 1)) + d + b - 1524.5
return jd
def to_range(x, d):
# reduce x to range 0-d by adding or subtracting multiples of d
if x < 0:
return x - int((x / d) - 1) * d
else:
return x - int((x / d)) * d
def local_sidereal_time(when, longitude):
# direct method of Meeus p 87
# when must be in UT
jd = julian_date(when)
t = (jd - 2451545.0) / 36525.0
mst = 280.46061837 + 360.98564736629 * (jd - 2451545.0) + .000387933 * t**2 - t**3 / 38710000
# convert to 0-360
mst = to_range(mst, 360)
# convert from Greenwich to local
lst = mst + longitude
return lst
def sun_altitude(when, latitude, longitude):
# Meeus p 163+
jd = julian_date(when)
rads = math.pi / 180.
t = (jd - 2451545.0) / 36525.0
L0 = 280.46646 + 36000.76983 * t + 0.0003032 * t * t
L0 = to_range(L0, 360)
M = 357.52911 + 35999.05029 * t - 0.0001537 * t * t
#e = 0.016708634 - 0.000042037 * t - 0.0000001267 * t * t
C = (1.914602 - 0.004817 * t - 0.000014 * t * t) * np.sin(M * rads) + \
(0.019993 - 0.000101 * t) * np.sin(2 * M * rads) + \
0.000289 * np.sin(3 * M * rads)
long_sun = L0 + C
#v = M + C
# R = (1.000001018 * (1 - e * e)) / (1 + e * np.cos(v * rads))
sigma = 125.04 - 1934.136 * t
lam = long_sun - 0.00569 - 0.00478 * np.sin(sigma * rads)
ep = 23 + (26/60) + (21.448/3600) - (46.815*t + 0.00059 * t**2 - 0.001813*t**3) / 3600
ep_corr = ep + 0.00256 * np.cos(sigma * rads)
ra = np.arctan2(np.cos(ep_corr * rads) * np.sin(lam * rads), np.cos(lam * rads)) / rads
ra = to_range(ra, 360)
dec = np.arcsin(np.sin(ep_corr * rads) * np.sin(lam * rads)) / rads
# now convert to locale
ts = time.time()
utc_offset = (datetime.fromtimestamp(ts) - datetime.utcfromtimestamp(ts)).total_seconds() / 3600.0
lst = local_sidereal_time(when, longitude)
lat = latitude * rads
H = (-utc_offset*15 + lst - ra) * rads
alt = np.arcsin(np.sin(lat) * np.sin(dec * rads) + np.cos(lat) * np.cos(dec * rads) * np.cos(H)) / rads
return alt
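A quick sanity check of the routines above; the reference value is the standard epoch J2000.0, and the latitude/longitude are arbitrary illustrative values.
from datetime import datetime

# J2000.0: 2000-01-01 12:00 UT corresponds to Julian Date 2451545.0
assert abs(julian_date(datetime(2000, 1, 1, 12, 0, 0)) - 2451545.0) < 1e-6

# Sun altitude in degrees for an arbitrary time (UT) and place
alt = sun_altitude(datetime(2021, 6, 21, 12, 0, 0), latitude=51.5, longitude=0.0)
print(alt)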
| 26.106383
| 104
| 0.600652
| 440
| 2,454
| 3.304545
| 0.370455
| 0.030949
| 0.028886
| 0.011004
| 0.111417
| 0.053645
| 0.030261
| 0
| 0
| 0
| 0
| 0.200318
| 0.231051
| 2,454
| 93
| 105
| 26.387097
| 0.570217
| 0.172372
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.232143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0880a4f7dffdc5894d94be459ed45b4d22287a7c
| 3,505
|
py
|
Python
|
tests/sql_parser/ast/test_insert_statement_is_parsed.py
|
vladbalmos/mitzasql
|
06c2a96eb4494095b2b72bc1454199a4940b0700
|
[
"MIT"
] | 69
|
2019-05-16T06:40:18.000Z
|
2022-03-24T06:23:49.000Z
|
tests/sql_parser/ast/test_insert_statement_is_parsed.py
|
vladbalmos/mitzasql
|
06c2a96eb4494095b2b72bc1454199a4940b0700
|
[
"MIT"
] | 36
|
2019-05-15T19:55:24.000Z
|
2021-07-22T07:07:14.000Z
|
tests/sql_parser/ast/test_insert_statement_is_parsed.py
|
vladbalmos/mitzasql
|
06c2a96eb4494095b2b72bc1454199a4940b0700
|
[
"MIT"
] | 8
|
2019-05-16T06:56:28.000Z
|
2022-02-11T02:24:12.000Z
|
import pytest
from mitzasql.sql_parser.parser import parse
from mitzasql.utils import dfs
def test_simple_insert_is_parsed():
raw_sql = '''
INSERT DELAYED INTO table (col1, col2, col3) VALUES (100, 200, 300)
'''
ast = parse(raw_sql)
assert len(ast) > 0
ast = ast[0]
assert ast.type == 'insert'
assert len(ast.children) == 4
modifier = ast.get_child('modifier')
assert modifier is not None
assert len(modifier.children) == 1
into = ast.get_child('into')
assert into is not None
assert len(into.children) == 1
assert into.children[0].children[0].value == 'table'
columns = ast.get_child('columns')
assert columns is not None
assert len(columns.children) == 1
assert len(columns.children[0].children) == 3
values = ast.get_child('values')
assert values is not None
assert len(values.children) == 1
assert len(values.children[0].children) == 3
def test_insert_without_columns_is_parsed():
raw_sql = '''
INSERT INTO table VALUES (100, 200, 300)
'''
ast = parse(raw_sql)
assert len(ast) > 0
ast = ast[0]
assert ast.type == 'insert'
assert len(ast.children) == 2
into = ast.get_child('into')
assert into is not None
assert len(into.children) == 1
assert into.children[0].children[0].value == 'table'
values = ast.get_child('values')
assert values is not None
assert len(values.children) == 1
assert len(values.children[0].children) == 3
def test_insert_with_select_is_parsed():
raw_sql = '''
INSERT INTO table SELECT col1, col2 FROM tbl2 WHERE col1 > 1 ON DUPLICATE
KEY UPDATE id = 1
'''
ast = parse(raw_sql)
assert len(ast) > 0
ast = ast[0]
assert ast.type == 'insert'
assert len(ast.children) == 3
into = ast.get_child('into')
assert into is not None
assert len(into.children) == 1
assert into.children[0].children[0].value == 'table'
select = ast.get_child('select')
assert select is not None
on = ast.get_child('on')
assert on is not None
assert len(on.children) == 1
duplicate = ast.get_child('duplicate')
assert duplicate is not None
assert len(duplicate.children) == 1
key = ast.get_child('key')
assert key is not None
assert len(key.children) == 1
update = ast.get_child('update')
assert update is not None
assert len(update.children) == 1
def test_insert_with_assignment_list_is_parsed():
raw_sql = '''
INSERT INTO table SET col1 = 2, col2 = 3
'''
ast = parse(raw_sql)
assert len(ast) > 0
ast = ast[0]
assert ast.type == 'insert'
assert len(ast.children) == 2
into = ast.get_child('into')
assert into is not None
assert len(into.children) == 1
assert into.children[0].children[0].value == 'table'
assignment_list = ast.get_child('assignment_list')
assert assignment_list is not None
assert len(assignment_list.children) == 2
assignment = assignment_list.children[0]
assert assignment.type == 'operator'
assert assignment.value == '='
assert len(assignment.children) == 2
assert assignment.children[0].value == 'col1'
assert assignment.children[1].value == '2'
assignment = assignment_list.children[1]
assert assignment.type == 'operator'
assert assignment.value == '='
assert len(assignment.children) == 2
assert assignment.children[0].value == 'col2'
assert assignment.children[1].value == '3'
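For orientation, the parser entry point exercised by these tests can also be called directly; this sketch only uses behaviour asserted above.
from mitzasql.sql_parser.parser import parse

ast = parse('INSERT INTO table VALUES (100, 200, 300)')[0]
print(ast.type)                                              # 'insert'
print(ast.get_child('into').children[0].children[0].value)   # 'table'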
| 26.353383
| 77
| 0.650499
| 493
| 3,505
| 4.525355
| 0.115619
| 0.104886
| 0.069027
| 0.087405
| 0.679964
| 0.558046
| 0.558046
| 0.51905
| 0.51905
| 0.51905
| 0
| 0.030303
| 0.22796
| 3,505
| 132
| 78
| 26.55303
| 0.794161
| 0
| 0
| 0.56
| 0
| 0
| 0.124964
| 0
| 0
| 0
| 0
| 0
| 0.56
| 1
| 0.04
| false
| 0
| 0.03
| 0
| 0.07
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0883af2fe80ecab9fbfc1b7be524e037979d920a
| 518
|
py
|
Python
|
testing/examples/talib-macd.py
|
pchaos/quanttesting
|
98331670547e8a45ba93b49f3e9c660495645114
|
[
"MIT"
] | 5
|
2020-04-08T14:14:05.000Z
|
2021-06-29T03:42:01.000Z
|
testing/examples/talib-macd.py
|
pchaos/quanttesting
|
98331670547e8a45ba93b49f3e9c660495645114
|
[
"MIT"
] | null | null | null |
testing/examples/talib-macd.py
|
pchaos/quanttesting
|
98331670547e8a45ba93b49f3e9c660495645114
|
[
"MIT"
] | 7
|
2020-04-15T15:07:39.000Z
|
2022-03-23T05:44:02.000Z
|
'''
Compute MACD with Ta-lib
'''
import pandas as pd
import numpy as np
import talib as ta
import tushare as ts
from matplotlib import rc
import matplotlib.pyplot as plt
import seaborn as sns
rc('mathtext', default='regular')
sns.set_style('white')
# %matplotlib
plt.rcParams["figure.figsize"] = (20, 10)
dw = ts.get_k_data("600600")
close = dw.close.values
dw['macd'], dw['macdsignal'], dw['macdhist'] = ta.MACD(close, fastperiod=12, slowperiod=26, signalperiod=9)
dw[['close','macd','macdsignal','macdhist']].plot()
plt.show()
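For reference, very similar MACD values can be computed with plain pandas exponential moving averages; this is an independent sketch, not part of the script above, and small differences versus TA-Lib are expected because TA-Lib seeds its EMAs differently.
import pandas as pd

def macd_pandas(close, fast=12, slow=26, signal=9):
    # MACD line = EMA(fast) - EMA(slow); signal line = EMA(signal) of the MACD line
    close = pd.Series(close)
    ema_fast = close.ewm(span=fast, adjust=False).mean()
    ema_slow = close.ewm(span=slow, adjust=False).mean()
    macd = ema_fast - ema_slow
    sig = macd.ewm(span=signal, adjust=False).mean()
    return macd, sig, macd - sig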
| 24.666667
| 107
| 0.722008
| 79
| 518
| 4.696203
| 0.594937
| 0.037736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032538
| 0.110039
| 518
| 21
| 108
| 24.666667
| 0.772234
| 0.048263
| 0
| 0
| 0
| 0
| 0.183128
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.466667
| 0
| 0.466667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
0887199a887a1fbf59285a7c42522a561d36fdf6
| 160
|
py
|
Python
|
jsons.py
|
tebeka/py2go-cheatsheet
|
14c83850876ef80c36af326ab4fc6f56344781c7
|
[
"BSD-3-Clause"
] | 13
|
2017-09-09T08:32:34.000Z
|
2022-02-28T04:32:43.000Z
|
jsons.py
|
tebeka/py2go-cheatsheet
|
14c83850876ef80c36af326ab4fc6f56344781c7
|
[
"BSD-3-Clause"
] | 3
|
2017-11-25T18:48:11.000Z
|
2017-12-30T13:00:04.000Z
|
jsons.py
|
tebeka/py2go-cheatsheet
|
14c83850876ef80c36af326ab4fc6f56344781c7
|
[
"BSD-3-Clause"
] | 2
|
2019-11-03T19:58:17.000Z
|
2020-04-28T01:14:17.000Z
|
import json
from sys import stdout
# START
data = '''{
"name": "bugs",
"age": 76
}'''
obj = json.loads(data)
json.dump(obj, stdout)
# END
print(obj)
| 10.666667
| 22
| 0.59375
| 23
| 160
| 4.130435
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016
| 0.21875
| 160
| 14
| 23
| 11.428571
| 0.744
| 0.05625
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0.111111
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
088834b65e8fc3335e7c944aeb1e307017ece6c9
| 1,258
|
py
|
Python
|
opetuskoodi/2021_10_18/2_kerta_kertaus.py
|
mikkokotola/pythonkoodaus
|
5415b3d87dfcb65b72edb916967824304d155d9a
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
opetuskoodi/2021_10_18/2_kerta_kertaus.py
|
mikkokotola/pythonkoodaus
|
5415b3d87dfcb65b72edb916967824304d155d9a
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
opetuskoodi/2021_10_18/2_kerta_kertaus.py
|
mikkokotola/pythonkoodaus
|
5415b3d87dfcb65b72edb916967824304d155d9a
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
# Review, session 3
# Variables and reading input from the user
nimi = input("Anna nimesi: ")
kengännumero = input("Mikä on kengännumerosi: ")
print("Moi vaan, " + nimi + "! Kengännumerosi on " + kengännumero + ".")
# F-string
print(f"Moi vaan, {nimi}! Kengännumerosi on {kengännumero}.")
# Numbers
# Age calculator
syntymävuosi = input("Mikä on syntymävuotesi? ")
syntymävuosi = int(syntymävuosi) # Convert the string to an integer so we can do arithmetic with it
ikä = 2021 - syntymävuosi
print(f"Ikäsi vuoden 2021 lopussa on {ikä}")
# A calculator that can multiply numbers
luku1 = int(input("Anna luku: "))
luku2 = int(input("Anna toinen luku: "))
tulos = luku1 * luku2
print(f"{luku1} * {luku2} = {tulos}")
# A calculator that computes the sum of three numbers
summa = 0
luku = int(input("Ensimmäinen luku: "))
summa = summa + luku
luku = int(input("Toinen luku: "))
summa = summa + luku
luku = int(input("kolmas luku: "))
summa = summa + luku
print(f"Lukujen summa: {summa}")
# What kinds of calculations can be done
print(5+2)
print(5-2)
print(5*2)
print(5/2)
print(5//2)
print(5%2)
print(2 + 2 * 3)
print((2 + 2) * 3)
# Floating-point numbers = decimal numbers
luku1 = 4.0
luku2 = 1.5
tulos = luku1 - luku2
print(f"Tulos on {tulos}")
print(f"{luku1} - {luku2} = {tulos}")
| 21.689655
| 99
| 0.683625
| 174
| 1,258
| 4.942529
| 0.373563
| 0.04186
| 0.048837
| 0.083721
| 0.305814
| 0.215116
| 0.124419
| 0.054651
| 0.054651
| 0.054651
| 0
| 0.041825
| 0.163752
| 1,258
| 57
| 100
| 22.070175
| 0.775665
| 0.228935
| 0
| 0.090909
| 0
| 0
| 0.356621
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.454545
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
08967bfbf25d6987de9933fc65d4f932dbcd6e60
| 1,307
|
py
|
Python
|
src/model/RoleProxy.py
|
JulienGrv/puremvc-python-demo-PySide-employeeadmin
|
b076493ac34254e665b485259b0a7122fa9cfde4
|
[
"BSD-3-Clause"
] | 4
|
2017-08-26T10:18:10.000Z
|
2020-07-28T19:50:54.000Z
|
src/model/RoleProxy.py
|
JulienGrv/puremvc-python-demo-PySide-employeeadmin
|
b076493ac34254e665b485259b0a7122fa9cfde4
|
[
"BSD-3-Clause"
] | null | null | null |
src/model/RoleProxy.py
|
JulienGrv/puremvc-python-demo-PySide-employeeadmin
|
b076493ac34254e665b485259b0a7122fa9cfde4
|
[
"BSD-3-Clause"
] | 3
|
2020-09-22T12:17:14.000Z
|
2021-07-16T12:28:18.000Z
|
# -*- coding: utf-8 -*-
from puremvc.patterns.proxy import Proxy
from .. import ApplicationFacade
class RoleProxy(Proxy):
NAME = 'RoleProxy'
def __init__(self, proxyName=None, data=[]):
super(RoleProxy, self).__init__(proxyName, data)
self.data = data
def addItem(self, role):
self.data.append(role)
def deleteItem(self, user):
for role in self.data:
if role.username == user.username:
self.data.remove(role)
break
def doesUserHaveRole(self, user, role):
return role in self.getUserRoles(user.username)
def addRoleToUser(self, user, role):
result = False
if not self.doesUserHaveRole(user, role):
userRoles = self.getUserRoles(user.username)
userRoles.append(role)
result = True
self.sendNotification(ApplicationFacade.ADD_ROLE_RESULT, result)
def removeRoleFromUser(self, user, role):
if self.doesUserHaveRole(user, role):
userRoles = self.getUserRoles(user.username)
userRoles.remove(role)
def getUserRoles(self, username):
userRoles = None
for userRoles in self.data:
if userRoles.username == username:
break
return userRoles.roles
| 27.808511
| 72
| 0.61974
| 138
| 1,307
| 5.797101
| 0.311594
| 0.05
| 0.045
| 0.105
| 0.185
| 0.185
| 0.185
| 0.185
| 0.185
| 0.185
| 0
| 0.001071
| 0.285386
| 1,307
| 46
| 73
| 28.413043
| 0.85546
| 0.016067
| 0
| 0.121212
| 0
| 0
| 0.007009
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212121
| false
| 0
| 0.060606
| 0.030303
| 0.393939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0896a00f400830a8eb41593559f65d607a6a09c6
| 1,358
|
py
|
Python
|
flappy-brird/utils/radio.py
|
victorathanasio/Personal-projects
|
94c870179cec32aa733a612a6faeb047df16d977
|
[
"MIT"
] | null | null | null |
flappy-brird/utils/radio.py
|
victorathanasio/Personal-projects
|
94c870179cec32aa733a612a6faeb047df16d977
|
[
"MIT"
] | null | null | null |
flappy-brird/utils/radio.py
|
victorathanasio/Personal-projects
|
94c870179cec32aa733a612a6faeb047df16d977
|
[
"MIT"
] | null | null | null |
import pygame
import os
class Radio:
def __init__(self, settings):
"""
Method that initialises the Radio object used for game sounds
Input = (Dict)
"""
pygame.mixer.init()
self.file_die_sound = pygame.mixer.Sound('Assets/Sounds/die.mp3')
self.file_hit_sound = pygame.mixer.Sound('Assets/Sounds/hit.mp3')
self.file_wing_sound = pygame.mixer.Sound('Assets/Sounds/wing.mp3')
self.file_score_sound = pygame.mixer.Sound('Assets/Sounds/point.mp3')
self.volume = settings['Sound Volume']
self.file_score_sound.set_volume(self.volume * 0.3)
self.file_die_sound.set_volume(self.volume)
self.file_hit_sound.set_volume(self.volume)
self.file_wing_sound.set_volume(self.volume)
self.file_score_sound.set_volume(self.volume)
def die_sound(self):
"""
Method that plays the death sound
"""
self.file_die_sound.play()
def score_sound(self):
"""
Method that plays the score sound
"""
self.file_score_sound.play()
def hit_sound(self):
"""
Method that plays the hit sound
"""
self.file_hit_sound.play()
def wing_sound(self):
"""
Method that plays the wing-beat sound
"""
self.file_wing_sound.play()
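A minimal usage sketch for the class above; it assumes the Assets/Sounds/*.mp3 files referenced in __init__ exist on disk.
radio = Radio({'Sound Volume': 0.5})   # the settings dict must contain 'Sound Volume'
radio.wing_sound()                     # play the wing-beat sound once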
| 25.148148
| 77
| 0.611193
| 176
| 1,358
| 4.494318
| 0.204545
| 0.131479
| 0.088496
| 0.11378
| 0.510746
| 0.510746
| 0.212389
| 0.108723
| 0.108723
| 0
| 0
| 0.006141
| 0.28056
| 1,358
| 53
| 78
| 25.622642
| 0.80348
| 0.150957
| 0
| 0
| 0
| 0
| 0.09668
| 0.084961
| 0
| 0
| 0
| 0
| 0
| 1
| 0.217391
| false
| 0
| 0.086957
| 0
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08a0d32c04f17aef2a43162c106a80b10c85518c
| 999
|
py
|
Python
|
setup.py
|
sunghyunzz/aiohttp-toolbox
|
1948a1962b3bd4071f234719b6683b55cd03d6f0
|
[
"MIT"
] | 6
|
2016-11-21T08:38:34.000Z
|
2019-02-20T12:56:16.000Z
|
setup.py
|
sunghyunzz/aiohttp-toolbox
|
1948a1962b3bd4071f234719b6683b55cd03d6f0
|
[
"MIT"
] | 1
|
2017-07-20T02:20:03.000Z
|
2017-07-20T02:20:03.000Z
|
setup.py
|
sunghyunzz/aiohttp-toolbox
|
1948a1962b3bd4071f234719b6683b55cd03d6f0
|
[
"MIT"
] | 2
|
2017-07-20T02:20:44.000Z
|
2019-02-21T13:37:37.000Z
|
"""
aiohttp-ultrajson
-----------------
Integrates UltraJSON with your aiohttp application.
"""
from setuptools import setup
setup(
name='aiohttp-ultrajson',
version='0.1.0',
url='https://github.com/sunghyunzz/aiohttp-ultrajson',
license='MIT',
author='sunghyunzz',
author_email='[email protected]',
description='Integrates UltraJSON with your aiohttp application.',
long_description=__doc__,
py_modules=['aiohttp_ultrajson'],
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp>2',
'ujson>=1.34'
],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: AsyncIO'
]
)
| 27
| 70
| 0.608609
| 98
| 999
| 6.102041
| 0.622449
| 0.107023
| 0.167224
| 0.130435
| 0.150502
| 0.150502
| 0
| 0
| 0
| 0
| 0
| 0.017016
| 0.235235
| 999
| 36
| 71
| 27.75
| 0.765707
| 0.088088
| 0
| 0
| 0
| 0
| 0.528239
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.034483
| 0
| 0.034483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08a4afc96f7c56b3ec32526d5ad975c5272d1d27
| 1,225
|
py
|
Python
|
setup.py
|
danizen/apache-replay
|
5e5cc8d0df693f2367d188d71099041c6a65317f
|
[
"MIT"
] | null | null | null |
setup.py
|
danizen/apache-replay
|
5e5cc8d0df693f2367d188d71099041c6a65317f
|
[
"MIT"
] | null | null | null |
setup.py
|
danizen/apache-replay
|
5e5cc8d0df693f2367d188d71099041c6a65317f
|
[
"MIT"
] | null | null | null |
from setuptools import setup
def get_readme():
with open('README.md') as f:
return f.read()
setup(
name = 'apache-replay',
version = '0.0.3',
url = 'https://github.com/danizen/apache-replay.git',
author = 'Daniel Davis',
author_email = '[email protected]',
description = 'Facilitates replaying of Apache files in Common Log and Combined Log format',
long_description = get_readme(),
long_description_content_type='text/markdown; charset=UTF-8; variant=CommonMark',
packages = ['apache_replay'],
entry_points={
'console_scripts': [
'apache-replay=apache_replay.script:main',
]
},
install_requires = ['attrs', 'requests'],
tests_require = ['attrs', 'requests', 'pytest', 'pytest-pythonpath', 'pytest-cov', 'tox'],
classifiers = [
'Development Status :: 3 - Alpha',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Software Development :: Testing :: Traffic Generation',
]
)
| 33.108108
| 96
| 0.625306
| 128
| 1,225
| 5.882813
| 0.742188
| 0.079681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006403
| 0.235102
| 1,225
| 36
| 97
| 34.027778
| 0.797225
| 0
| 0
| 0
| 0
| 0
| 0.526531
| 0.031837
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| true
| 0
| 0.03125
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08a6713846bc912e38363c64df0ddb98d1d40470
| 464
|
py
|
Python
|
setup.py
|
duytintruong/do_more
|
3a306da78ca302d2963cc7bae5f17e668168b595
|
[
"MIT"
] | null | null | null |
setup.py
|
duytintruong/do_more
|
3a306da78ca302d2963cc7bae5f17e668168b595
|
[
"MIT"
] | null | null | null |
setup.py
|
duytintruong/do_more
|
3a306da78ca302d2963cc7bae5f17e668168b595
|
[
"MIT"
] | null | null | null |
from distutils.core import setup
setup(
name='do_more',
packages=['do_more'],
version='0.1.0',
description='A library enhancing pydoit features.',
author='Duy Tin Truong',
author_email='',
url='https://github.com/duytintruong/do_more',
download_url='https://github.com/duytintruong/do_more/archive/0.1.0.tar.gz',
keywords=['pipeline', 'data', 'doit'],
classifiers=[],
install_requires=[
'doit>=0.31.1',
],
)
| 27.294118
| 80
| 0.637931
| 60
| 464
| 4.816667
| 0.666667
| 0.083045
| 0.020761
| 0.117647
| 0.242215
| 0.242215
| 0.242215
| 0
| 0
| 0
| 0
| 0.026247
| 0.178879
| 464
| 16
| 81
| 29
| 0.732283
| 0
| 0
| 0
| 0
| 0.0625
| 0.422414
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.0625
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08ac337e36cbf17a299188a90d4c593630ec7136
| 786
|
py
|
Python
|
centraldogma/util.py
|
line/centraldogma-python
|
2248e8d7d660c0535aa747a70742ddd2bb0a5268
|
[
"Apache-2.0"
] | 8
|
2021-12-02T00:51:35.000Z
|
2022-01-07T09:49:08.000Z
|
centraldogma/util.py
|
line/centraldogma-python
|
2248e8d7d660c0535aa747a70742ddd2bb0a5268
|
[
"Apache-2.0"
] | 8
|
2021-11-22T03:37:17.000Z
|
2022-02-14T10:02:31.000Z
|
centraldogma/util.py
|
line/centraldogma-python
|
2248e8d7d660c0535aa747a70742ddd2bb0a5268
|
[
"Apache-2.0"
] | 4
|
2021-11-22T03:48:39.000Z
|
2021-12-31T05:42:43.000Z
|
# Copyright 2021 LINE Corporation
#
# LINE Corporation licenses this file to you under the Apache License,
# version 2.0 (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def to_string(obj) -> str:
items = vars(obj).items()
values = [f"{k}={v}" for k, v in items]
return f"{obj.__class__.__name__}({','.join(values)})"
| 39.3
| 78
| 0.720102
| 121
| 786
| 4.603306
| 0.628099
| 0.10772
| 0.046679
| 0.057451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012461
| 0.183206
| 786
| 19
| 79
| 41.368421
| 0.85514
| 0.75827
| 0
| 0
| 0
| 0
| 0.291429
| 0.251429
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08ad2e5befe9beab57f5cfbb4752e8b8f6f82193
| 3,834
|
py
|
Python
|
Build/site_scons/msvs_preprocessed.py
|
Syeberman/nohtyP
|
59d7214a5a5474a03c54f45d79ad4fd037989a79
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Build/site_scons/msvs_preprocessed.py
|
Syeberman/nohtyP
|
59d7214a5a5474a03c54f45d79ad4fd037989a79
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Build/site_scons/msvs_preprocessed.py
|
Syeberman/nohtyP
|
59d7214a5a5474a03c54f45d79ad4fd037989a79
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
"""Provides a Preprocessed action for the Microsoft Visual Studio compilers.
"""
import os
import SCons.Action
import SCons.Util
import preprocessed_builder
# XXX These are internal to SCons and may change in the future...but it's unlikely
from SCons.Tool.msvc import CSuffixes, CXXSuffixes, msvc_batch_key
# TODO Contribute this back to SCons
def _preprocessed_emitter(target, source, env, suffix):
target = [
SCons.Util.adjustixes(str(t), "", suffix, ensure_suffix=False)
for t in target
]
return (target, source)
def c_preprocessed_emitter(target, source, env):
suffix = env.subst('$CPREPROCESSEDSUFFIX')
return _preprocessed_emitter(target, source, env, suffix)
def cxx_preprocessed_emitter(target, source, env):
suffix = env.subst('$CXXPREPROCESSEDSUFFIX')
return _preprocessed_emitter(target, source, env, suffix)
# XXX Adapted from SCons' msvc_output_flag
def msvc_pp_output_flag(target, source, env, for_signature):
"""
Returns the correct /Fi flag for batching.
If batching is disabled or there's only one source file, then we
return an /Fi string that specifies the target explicitly. Otherwise,
we return an /Fi string that just specifies the first target's
directory (where the Visual C/C++ compiler will put the .i files).
"""
# TODO /Fi is not supported on Visual Studio 9.00 (2008) and earlier
# https://msdn.microsoft.com/en-us/library/8z9z0bx6(v=vs.90).aspx
# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH
# was set to False. This new version should work better. Removed
# len(source)==1 as batch mode can compile only one file
# (and it also fixed problem with compiling only one changed file
# with batch mode enabled)
if not 'MSVC_BATCH' in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
return '/Fi$TARGET'
else:
# The Visual C/C++ compiler requires a \ at the end of the /Fi
# option to indicate an output directory. We use os.sep here so
# that the test(s) for this can be run on non-Windows systems
# without having a hard-coded backslash mess up command-line
# argument parsing.
return '/Fi${TARGET.dir}' + os.sep
CPreprocessedAction = SCons.Action.Action("$PPCCCOM", "$PPCCCOMSTR",
batch_key=msvc_batch_key,
targets='$CHANGED_TARGETS')
CXXPreprocessedAction = SCons.Action.Action("$PPCXXCOM", "$PPCXXCOMSTR",
batch_key=msvc_batch_key,
targets='$CHANGED_TARGETS')
def generate_PreprocessedBuilder(env):
preprocessed = preprocessed_builder.createPreprocessedBuilder(env)
for suffix in CSuffixes:
preprocessed.add_action(suffix, CPreprocessedAction)
preprocessed.add_emitter(suffix, c_preprocessed_emitter)
for suffix in CXXSuffixes:
preprocessed.add_action(suffix, CXXPreprocessedAction)
preprocessed.add_emitter(suffix, cxx_preprocessed_emitter)
env['_MSVC_PP_OUTPUT_FLAG'] = msvc_pp_output_flag
# PPCC is the preprocessor-only mode for CC, the C compiler (compare with SHCC et al)
# TODO For SCons: be smart and when passed a preprocessed file, compiler skips certain options?
env['PPCC'] = '$CC'
env['PPCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['PPCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['PPCCCOM'] = '${TEMPFILE("$PPCC /P $_MSVC_PP_OUTPUT_FLAG /c $CHANGED_SOURCES $PPCFLAGS $PPCCFLAGS $_CCCOMCOM","$PPCCCOMSTR")}'
env['PPCXX'] = '$CXX'
env['PPCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['PPCXXCOM'] = '${TEMPFILE("$PPCXX /P $_MSVC_PP_OUTPUT_FLAG /c $CHANGED_SOURCES $PPCXXFLAGS $PPCCFLAGS $_CCCOMCOM","$PPCXXCOMSTR")}'
| 41.673913
| 139
| 0.684142
| 498
| 3,834
| 5.144578
| 0.401606
| 0.02459
| 0.035129
| 0.0605
| 0.177986
| 0.163154
| 0.130367
| 0.094457
| 0
| 0
| 0
| 0.005005
| 0.21831
| 3,834
| 91
| 140
| 42.131868
| 0.84985
| 0.361242
| 0
| 0.136364
| 0
| 0.045455
| 0.206681
| 0.049687
| 0
| 0
| 0
| 0.021978
| 0
| 1
| 0.113636
| false
| 0
| 0.113636
| 0
| 0.340909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08b53292c5c752e44fcf8b466dc5d84fa3ed0ec7
| 231
|
py
|
Python
|
server.py
|
LuisAlbizo/luisalbizo.github.io
|
823cac2c184686eb5056f9e1d3d0790f9a2233e1
|
[
"MIT"
] | null | null | null |
server.py
|
LuisAlbizo/luisalbizo.github.io
|
823cac2c184686eb5056f9e1d3d0790f9a2233e1
|
[
"MIT"
] | null | null | null |
server.py
|
LuisAlbizo/luisalbizo.github.io
|
823cac2c184686eb5056f9e1d3d0790f9a2233e1
|
[
"MIT"
] | null | null | null |
import http.server
import os
import socketserver
Handler = http.server.SimpleHTTPRequestHandler
httpd = socketserver.TCPServer(("127.0.0.1", 8080), Handler)
print("server:\thttp://127.0.0.1:8080\n\nlog:")
httpd.serve_forever()
| 19.25
| 60
| 0.757576
| 33
| 231
| 5.272727
| 0.575758
| 0.114943
| 0.057471
| 0.068966
| 0.114943
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094787
| 0.08658
| 231
| 11
| 61
| 21
| 0.729858
| 0
| 0
| 0
| 0
| 0
| 0.203463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
08bd8918199d2e5006f69cc8ccd6b3fde0ba16d8
| 1,850
|
py
|
Python
|
python/test_golden_master.py
|
AEGISoft/GildedRose-Refactoring-Kata
|
a81452de5b6831fa6c4f42b15f827ecf6ef29807
|
[
"MIT"
] | null | null | null |
python/test_golden_master.py
|
AEGISoft/GildedRose-Refactoring-Kata
|
a81452de5b6831fa6c4f42b15f827ecf6ef29807
|
[
"MIT"
] | null | null | null |
python/test_golden_master.py
|
AEGISoft/GildedRose-Refactoring-Kata
|
a81452de5b6831fa6c4f42b15f827ecf6ef29807
|
[
"MIT"
] | null | null | null |
import unittest
from gilded_rose import Item, GildedRose
class GoldenMasterTest(unittest.TestCase):
def test_golden_master(self):
output_file = None
try:
output_file = open("output.txt", 'r')
golden_master_lines = output_file.readlines()
finally:
output_file.close()
lines = golden_master_test_run()
for i in range(len(golden_master_lines) - 1):
self.assertEqual(golden_master_lines[i], lines[i])
def golden_master_test_run():
lines = ["OMGHAI!"]
items = [
Item(name="+5 Dexterity Vest", sell_in=10, quality=20),
Item(name="Aged Brie", sell_in=2, quality=0),
Item(name="Elixir of the Mongoose", sell_in=5, quality=7),
Item(name="Sulfuras, Hand of Ragnaros", sell_in=0, quality=80),
Item(name="Sulfuras, Hand of Ragnaros", sell_in=-1, quality=80),
Item(name="Backstage passes to a TAFKAL80ETC concert", sell_in=15, quality=20),
Item(name="Backstage passes to a TAFKAL80ETC concert", sell_in=10, quality=49),
Item(name="Backstage passes to a TAFKAL80ETC concert", sell_in=5, quality=49),
Item(name="Conjured Mana Cake", sell_in=3, quality=6), # <-- :O
]
days = 2
import sys
if len(sys.argv) > 1:
days = int(sys.argv[1]) + 1
for day in range(days):
lines.append("-------- day %s --------" % day)
lines.append("name, sellIn, quality")
for item in items:
lines.append(str(item))
lines.append("")
GildedRose(items).update_quality()
return lines
def persist_golden_master_testrun():
output_file = open("output.txt", mode="w+")
for line in golden_master_test_run():
output_file.write(line)
output_file.write("\n")
if __name__ == '__main__':
unittest.main()
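The intended golden-master workflow, based only on the functions above (a sketch; the day count is illustrative).
# Record the golden master once against the known-good implementation
# (writes output.txt next to the test), then re-run the test after refactoring;
# an optional command-line argument sets the number of simulated days:
#     python test_golden_master.py 5
persist_golden_master_testrun()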
| 31.355932
| 87
| 0.617838
| 247
| 1,850
| 4.441296
| 0.368421
| 0.065634
| 0.04649
| 0.05196
| 0.244303
| 0.20237
| 0.20237
| 0.20237
| 0.136737
| 0.136737
| 0
| 0.027937
| 0.245405
| 1,850
| 58
| 88
| 31.896552
| 0.75788
| 0.003243
| 0
| 0
| 0
| 0
| 0.176982
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 1
| 0.066667
| false
| 0.066667
| 0.066667
| 0
| 0.177778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
08cc589cc9423942aa94cc3bb343109a1f7cba67
| 18,161
|
py
|
Python
|
tests/strategies/test_horizontal.py
|
rohith-bs/dgraphpandas
|
29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316
|
[
"MIT"
] | 1
|
2022-02-28T17:34:11.000Z
|
2022-02-28T17:34:11.000Z
|
tests/strategies/test_horizontal.py
|
rohith-bs/dgraphpandas
|
29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316
|
[
"MIT"
] | null | null | null |
tests/strategies/test_horizontal.py
|
rohith-bs/dgraphpandas
|
29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316
|
[
"MIT"
] | 1
|
2021-04-10T19:57:05.000Z
|
2021-04-10T19:57:05.000Z
|
import unittest
from unittest.mock import patch, Mock
import pandas as pd
from pandas.testing import assert_frame_equal
from parameterized import parameterized
from dgraphpandas.strategies.horizontal import horizontal_transform
class HorizontalTests(unittest.TestCase):
@parameterized.expand([
(None, {'config': {}}, 'config_key'),
(pd.DataFrame(), None, 'config_key'),
(pd.DataFrame(), '', 'config_key'),
(pd.DataFrame(), {'config': {}}, None),
(pd.DataFrame(), {'config': {}}, ''),
])
def test_horizontal_transform_null_parameters(self, frame, config, config_file_key):
'''
Ensures when parameters are null, then an
error is raised
'''
with self.assertRaises(ValueError):
horizontal_transform(frame, config, config_file_key)
def test_horizontal_config_key_does_not_exist(self):
'''
Ensures when the config key does not exist
within the config then an error is raised
'''
frame = pd.DataFrame()
config_key = 'my_key'
config = {
'files': {
'some_other_key': {}
}
}
with self.assertRaises(KeyError):
horizontal_transform(frame, config, config_key)
@parameterized.expand([
('',),
(None,),
])
def test_horizontal_subject_fields_not_provided(self, subject_fields):
'''
Ensures when subject fields is not provided
then an error is raised
'''
frame = pd.DataFrame()
config_key = 'my_key'
config = {
'files': {
'my_key': {
'subject_fields': subject_fields
}
}
}
with self.assertRaises(ValueError):
horizontal_transform(frame, config, config_key)
def test_horizontal_could_not_convert_type(self):
'''
Ensures when a type could not be applied to a column,
then an error is raised
'''
frame = pd.DataFrame(data={
'customer_id': [1, 2, 3],
'age': [23, 'not number', 56]
})
config = {
'files': {
'customer': {
'subject_fields': ['customer_id'],
'type_overrides': {
'customer_id': 'int32',
'age': 'int32'
}
}
}
}
config_file_key = 'customer'
with self.assertRaises(SystemExit):
horizontal_transform(frame, config, config_file_key)
@parameterized.expand([
###
(
'single_predicate',
pd.DataFrame(data={
'customer_id': [1, 2, 3],
'age': [23, 67, 56]
}),
{
'files': {
'customer': {
'subject_fields': ['customer_id'],
'type_overrides': {
'customer_id': 'int32',
'age': 'int32'
}
}
}
},
'customer',
pd.DataFrame(data={
'customer_id': pd.Series([1, 2, 3], dtype='int32'),
'predicate': pd.Series(['age']*3, dtype='O'),
'object': pd.Series([23, 67, 56], dtype='int32')
})
),
###
(
'multiple_predicates',
pd.DataFrame(data={
'customer_id': [1, 2, 3],
'age': [23, 67, 56],
'weight': [189, 167, 190]
}),
{
'files': {
'customer': {
'subject_fields': ['customer_id'],
'type_overrides': {
'customer_id': 'int32',
'age': 'int32',
'weight': 'int32'
}
}
}
},
'customer',
pd.DataFrame(data={
'customer_id': pd.Series([1, 2, 3, 1, 2, 3], dtype='int32'),
'predicate': pd.Series(['age']*3 + ['weight']*3, dtype='O'),
'object': pd.Series([23, 67, 56, 189, 167, 190], dtype='int32')
})
),
###
(
'multiple_subject_fields',
pd.DataFrame(data={
'customer_id': [1, 2, 3],
'order_id': [405, 210, 321],
'value': [200, 321, 67],
}),
{
'files': {
'order': {
'subject_fields': ['customer_id', 'order_id'],
'type_overrides': {
'customer_id': 'int32',
'order_id': 'int32',
'value': 'int32'
}
}
}
},
'order',
pd.DataFrame(data={
'customer_id': pd.Series([1, 2, 3], dtype='int32'),
'order_id': pd.Series([405, 210, 321], dtype='int32'),
'predicate': pd.Series(['value']*3, dtype='O'),
'object': pd.Series([200, 321, 67], dtype='int32')
})
)
])
@patch('dgraphpandas.strategies.horizontal.vertical_transform')
def test_horizontal_melted_passed(self, name, frame, config, config_file_key, expected_melted, transform_mock: Mock):
'''
Ensures that the passed horizontal frame is melted and
passed into the vertical_transform.
Also ensures the same config and key are passed through
'''
intrinsic_mock = Mock(spec=pd.DataFrame)
edges_mock = Mock(spec=pd.DataFrame)
transform_mock.return_value = (intrinsic_mock, edges_mock)
intrinsic, edges = horizontal_transform(frame, config, config_file_key)
transform_mock.assert_called_once()
args, kwargs = transform_mock.call_args_list[0]
invoked_frame, invoked_config, invoked_key = args
assert_frame_equal(invoked_frame, expected_melted)
self.assertEqual(invoked_config, config)
self.assertEqual(invoked_key, config_file_key)
self.assertEqual(kwargs, {})
self.assertEqual(intrinsic_mock, intrinsic)
self.assertEqual(edges_mock, edges)
def test_horizontal_frame_only_has_subject_and_no_data_fields(self):
'''
Ensures when the horizontal frame only has subject fields
and no actual data fields then an error is raised
'''
frame = pd.DataFrame(data={
'customer_id': [1, 2, 3],
'order_id': [405, 210, 321]
})
config = {
'files': {
'order': {
'subject_fields': ['customer_id', 'order_id'],
'type_overrides': {
'customer_id': 'int32',
'order_id': 'int32',
}
}
}
}
config_key = 'order'
with self.assertRaises(ValueError):
horizontal_transform(frame, config, config_key)
@patch('dgraphpandas.strategies.horizontal.vertical_transform')
@patch('dgraphpandas.strategies.horizontal.pd.read_csv', spec=pd.read_csv)
def test_horizontal_melted_file_path_passed(self, mock_pandas: Mock, mock_transform: Mock):
'''
Ensures when a file path(str) it passed into the transform, then the file
is read using read_csv before going into logic.
'''
file = 'test.csv'
frame = pd.DataFrame(data={
'customer_id': [1, 2, 3],
'age': [23, 67, 56]
})
config = {
'files': {
'customer': {
'subject_fields': ['customer_id'],
'type_overrides': {
'customer_id': 'int32',
'age': 'int32'
}
}
}
}
config_file_key = 'customer'
expected_melted = pd.DataFrame(data={
'customer_id': pd.Series([1, 2, 3], dtype='int32'),
'predicate': pd.Series(['age']*3, dtype='O'),
'object': pd.Series([23, 67, 56], dtype='int32')
})
mock_pandas.return_value = frame
horizontal_transform(file, config, config_file_key)
args, kwargs = mock_pandas.call_args_list[0]
self.assertEqual(file, args[0])
self.assertEqual({}, kwargs)
args, kwargs = mock_transform.call_args_list[0]
assert_frame_equal(expected_melted, args[0])
self.assertEqual(config, args[1])
self.assertEqual(config_file_key, args[2])
@patch('dgraphpandas.strategies.horizontal.vertical_transform')
@patch('dgraphpandas.strategies.horizontal.pd.read_csv', spec=pd.read_csv)
def test_horizontal_melted_file_path_custom_csv_passed(self, mock_pandas: Mock, mock_transform: Mock):
'''
Ensures when a read_csv_options option is defined inside file configuration
it is applied to the pd.read_csv call.
'''
file = 'test.csv'
read_csv_options = {'sep': ';'}
frame = pd.DataFrame(data={
'customer_id': [1, 2, 3],
'age': [23, 67, 56]
})
config = {
'files': {
'customer': {
'subject_fields': ['customer_id'],
'type_overrides': {
'customer_id': 'int32',
'age': 'int32'
},
'read_csv_options': read_csv_options
}
}
}
config_file_key = 'customer'
expected_melted = pd.DataFrame(data={
'customer_id': pd.Series([1, 2, 3], dtype='int32'),
'predicate': pd.Series(['age']*3, dtype='O'),
'object': pd.Series([23, 67, 56], dtype='int32')
})
mock_pandas.return_value = frame
horizontal_transform(file, config, config_file_key)
args, kwargs = mock_pandas.call_args_list[0]
self.assertEqual(file, args[0])
self.assertEqual(read_csv_options, kwargs)
args, kwargs = mock_transform.call_args_list[0]
assert_frame_equal(expected_melted, args[0])
self.assertEqual(config, args[1])
self.assertEqual(config_file_key, args[2])
@parameterized.expand([
###
(
'year_wrong_order',
{'dob': {'format': "%Y-%m-%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['03-02-2021', '01-03-1945'],
'weight': [50, 32]
})
),
###
(
'alphanumerical_string',
{'dob': {'format': "%Y-%m-%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['not a date', '01-03-1945'],
'weight': [50, 32]
})
),
###
(
'missing_dashes',
{'dob': {'format': "%Y-%m%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['2021-03-02', '19450301'],
'weight': [50, 32]
})
),
###
(
'missing_dots',
{'dob': {'format': "%Y.%m.%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['2021-03-02', '1945.03&01'],
'weight': [50, 32]
})
),
###
(
'malformed_month_string',
{'dob': {'format': "%d-%b-%Y"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['02-FebFake-2021', '01-Mar-1945'],
'weight': [50, 32]
})
)
])
@patch('dgraphpandas.strategies.horizontal.vertical_transform')
def test_horizontal_transform_incorrect_date_format(self, name, date_format, frame, transform_mock: Mock):
'''
Ensures that when the provided date format does not match the values within the frame,
an error is raised.
'''
config_file_key = 'customer'
config = {
'files': {
config_file_key: {
'subject_fields': ['customer_id'],
'date_fields': date_format
}
}
}
with self.assertRaisesRegex(ValueError, "time data (.*) (doesn't|does not) match format(.*)"):
horizontal_transform(frame, config, config_file_key)
transform_mock.assert_not_called()
@parameterized.expand([
###
(
'unconverted_month_day',
{'dob': {'format': "%Y"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['2021-03-02', '1945-03-01'],
'weight': [50, 32]
})
),
###
(
'unconverted_month_year',
{'dob': {'format': "%m-%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['03-02-2021', '03-01-2021'],
'weight': [50, 32]
})
)
])
@patch('dgraphpandas.strategies.horizontal.vertical_transform')
def test_horizontal_transform_unconverted_date_parts(self, name, date_format, frame, transform_mock: Mock):
'''
Ensures that when the date only partially matches the format and there are
unconverted parts remaining, an error is raised.
'''
config_file_key = 'customer'
config = {
'files': {
config_file_key: {
'subject_fields': ['customer_id'],
'date_fields': date_format
}
}
}
with self.assertRaisesRegex(ValueError, "unconverted data remains: (.*)"):
horizontal_transform(frame, config, config_file_key)
transform_mock.assert_not_called()
@parameterized.expand([
###
(
'dash_format',
{'dob': {'format': "%Y-%m-%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['2021-03-02', '1945-03-01'],
'weight': [50, 32]
}),
pd.DataFrame(data={
'customer_id': [1, 2, 1, 2],
'predicate': ['dob', 'dob', 'weight', 'weight'],
'object': [pd.to_datetime('2021-03-02 00:00:00'), pd.to_datetime('1945-03-01 00:00:00'), 50, 32]
})
),
###
(
'dot_format',
{'dob': {'format': "%Y.%m.%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['1999.05.09', '1789.02.12'],
'weight': [50, 32]
}),
pd.DataFrame(data={
'customer_id': [1, 2, 1, 2],
'predicate': ['dob', 'dob', 'weight', 'weight'],
'object': [pd.to_datetime('1999-05-09 00:00:00'), pd.to_datetime('1789-02-12 00:00:00'), 50, 32]
})
),
###
(
'multiple_date_fields',
{'updated_at': {'format': '%Y.%m.%d'}, 'dob': {'format': "%Y.%m.%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['1999.05.09', '1789.02.12'],
'updated_at': ['2021.03.02', '2021.03.04'],
'weight': [50, 32]
}),
pd.DataFrame(data={
'customer_id': [1, 2, 1, 2, 1, 2],
'predicate': ['dob', 'dob', 'updated_at', 'updated_at', 'weight', 'weight'],
'object': [
pd.to_datetime('1999-05-09 00:00:00'),
pd.to_datetime('1789-02-12 00:00:00'),
pd.to_datetime('2021-03-02 00:00:00'),
pd.to_datetime('2021-03-04 00:00:00'),
50,
32]
})
),
###
(
'multiple_date_fields_different_formats',
{'updated_at': {'format': '%Y$%m$%d'}, 'dob': {'format': "%Y.%m.%d"}},
pd.DataFrame(data={
'customer_id': [1, 2],
'dob': ['1999.05.09', '1789.02.12'],
'updated_at': ['2021$03$02', '2021$03$04'],
'weight': [50, 32]
}),
pd.DataFrame(data={
'customer_id': [1, 2, 1, 2, 1, 2],
'predicate': ['dob', 'dob', 'updated_at', 'updated_at', 'weight', 'weight'],
'object': [
pd.to_datetime('1999-05-09 00:00:00'),
pd.to_datetime('1789-02-12 00:00:00'),
pd.to_datetime('2021-03-02 00:00:00'),
pd.to_datetime('2021-03-04 00:00:00'),
50,
32]
})
)
])
@patch('dgraphpandas.strategies.horizontal.vertical_transform')
def test_horizontal_transform_correct_date_format(self, name, date_format, frame, expected_melted, transform_mock: Mock):
'''
Ensures that when the provided date_format matches the values in the frame,
no error is raised.
'''
config_file_key = 'customer'
config = {
'files': {
config_file_key: {
'subject_fields': ['customer_id'],
'date_fields': date_format
}
}
}
horizontal_transform(frame, config, config_file_key)
transform_mock.assert_called_once()
args, kwargs = transform_mock.call_args_list[0]
passed_frame, passed_config, passed_config_key = args
assert_frame_equal(passed_frame, expected_melted)
self.assertEqual(passed_config, config)
self.assertEqual(passed_config_key, config_file_key)
self.assertEqual(kwargs, {})
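# Illustrative sketch (not part of the test suite): how horizontal_transform is driven
# by the configuration shape exercised above. The call signature and config keys are
# taken from the tests; everything else here is an assumption.
#
# frame = pd.DataFrame(data={
#     'customer_id': [1, 2],
#     'dob': ['2021-03-02', '1945-03-01'],
#     'weight': [50, 32]
# })
# config = {
#     'files': {
#         'customer': {
#             'subject_fields': ['customer_id'],
#             'date_fields': {'dob': {'format': '%Y-%m-%d'}}
#         }
#     }
# }
# horizontal_transform(frame, config, 'customer')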
| 34.395833
| 125
| 0.472
| 1,763
| 18,161
| 4.657969
| 0.111174
| 0.05358
| 0.049318
| 0.075621
| 0.738066
| 0.687652
| 0.66963
| 0.649294
| 0.644423
| 0.612884
| 0
| 0.062297
| 0.38836
| 18,161
| 527
| 126
| 34.461101
| 0.67699
| 0.058587
| 0
| 0.62877
| 0
| 0
| 0.190721
| 0.032026
| 0
| 0
| 0
| 0
| 0.074246
| 1
| 0.025522
| false
| 0.016241
| 0.013921
| 0
| 0.041763
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08dcaa11c309d6ad11738f4ba7bc30c87f71fe32
| 274
|
py
|
Python
|
templates/python.py
|
limacat76/Polyglot-Study
|
ec71186d4dfbecebf372eb11affd9b5a2b76e47a
|
[
"MIT"
] | null | null | null |
templates/python.py
|
limacat76/Polyglot-Study
|
ec71186d4dfbecebf372eb11affd9b5a2b76e47a
|
[
"MIT"
] | null | null | null |
templates/python.py
|
limacat76/Polyglot-Study
|
ec71186d4dfbecebf372eb11affd9b5a2b76e47a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from sys import argv
from sys import stdin
from sys import stdout
alp = len(argv)
if alp > 1 and argv[1] == "--version":
print('version 0.1')
quit()
if alp > 1 and argv[1] == "--help":
print('ctrl+d to quit')
quit()
print('todo')
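# A minimal sketch (an assumption about the template's intent, since stdin/stdout are
# imported above but never used) of what could replace the 'todo' print: echo input
# back until ctrl+d.
#
# for line in stdin:
#     stdout.write(line)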
| 17.125
| 38
| 0.605839
| 46
| 274
| 3.608696
| 0.521739
| 0.126506
| 0.23494
| 0.108434
| 0.168675
| 0.168675
| 0
| 0
| 0
| 0
| 0
| 0.033019
| 0.226277
| 274
| 15
| 39
| 18.266667
| 0.75
| 0.076642
| 0
| 0.181818
| 0
| 0
| 0.174603
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.272727
| 0
| 0.272727
| 0.272727
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08e17bfd02380a8da82eb6cb901cf80fe395ede2
| 34,734
|
py
|
Python
|
feature.py
|
TimothyChen225/AFC-X
|
901a0019b7c153804570c480c3da4825776dbf02
|
[
"MIT"
] | null | null | null |
feature.py
|
TimothyChen225/AFC-X
|
901a0019b7c153804570c480c3da4825776dbf02
|
[
"MIT"
] | null | null | null |
feature.py
|
TimothyChen225/AFC-X
|
901a0019b7c153804570c480c3da4825776dbf02
|
[
"MIT"
] | null | null | null |
from collections import Counter
from Bio import SeqIO
import numpy as np
import warnings
import math
warnings.filterwarnings(action='ignore', category=UserWarning, module='gensim')
from gensim.models import Word2Vec
Max_length = 100 # maximum length of used peptides
def check_length(file):
length = []
global Max_length
with open(file) as f:
for i in f:
if i[0] != ">":
length.append(len(i))
temp_max = max(length)
if temp_max > Max_length:
Max_length = temp_max
def add(x, i):
x_copy = x.copy()
x_copy[i] = 1
return x_copy
def BLOSUM62(seq):
blosum62 = {
'A': [4, -1, -2, -2, 0, -1, -1, 0, -2, -1, -1, -1, -1, -2, -1, 1, 0, -3, -2, 0], # A
'R': [-1, 5, 0, -2, -3, 1, 0, -2, 0, -3, -2, 2, -1, -3, -2, -1, -1, -3, -2, -3], # R
'N': [-2, 0, 6, 1, -3, 0, 0, 0, 1, -3, -3, 0, -2, -3, -2, 1, 0, -4, -2, -3], # N
'D': [-2, -2, 1, 6, -3, 0, 2, -1, -1, -3, -4, -1, -3, -3, -1, 0, -1, -4, -3, -3], # D
'C': [0, -3, -3, -3, 9, -3, -4, -3, -3, -1, -1, -3, -1, -2, -3, -1, -1, -2, -2, -1], # C
'Q': [-1, 1, 0, 0, -3, 5, 2, -2, 0, -3, -2, 1, 0, -3, -1, 0, -1, -2, -1, -2], # Q
'E': [-1, 0, 0, 2, -4, 2, 5, -2, 0, -3, -3, 1, -2, -3, -1, 0, -1, -3, -2, -2], # E
'G': [0, -2, 0, -1, -3, -2, -2, 6, -2, -4, -4, -2, -3, -3, -2, 0, -2, -2, -3, -3], # G
'H': [-2, 0, 1, -1, -3, 0, 0, -2, 8, -3, -3, -1, -2, -1, -2, -1, -2, -2, 2, -3], # H
'I': [-1, -3, -3, -3, -1, -3, -3, -4, -3, 4, 2, -3, 1, 0, -3, -2, -1, -3, -1, 3], # I
'L': [-1, -2, -3, -4, -1, -2, -3, -4, -3, 2, 4, -2, 2, 0, -3, -2, -1, -2, -1, 1], # L
'K': [-1, 2, 0, -1, -3, 1, 1, -2, -1, -3, -2, 5, -1, -3, -1, 0, -1, -3, -2, -2], # K
'M': [-1, -1, -2, -3, -1, 0, -2, -3, -2, 1, 2, -1, 5, 0, -2, -1, -1, -1, -1, 1], # M
'F': [-2, -3, -3, -3, -2, -3, -3, -3, -1, 0, 0, -3, 0, 6, -4, -2, -2, 1, 3, -1], # F
'P': [-1, -2, -2, -1, -3, -1, -1, -2, -2, -3, -3, -1, -2, -4, 7, -1, -1, -4, -3, -2], # P
'S': [1, -1, 1, 0, -1, 0, 0, 0, -1, -2, -2, 0, -1, -2, -1, 4, 1, -3, -2, -2], # S
'T': [0, -1, 0, -1, -1, -1, -1, -2, -2, -1, -1, -1, -1, -2, -1, 1, 5, -2, -2, 0], # T
'W': [-3, -3, -4, -4, -2, -2, -3, -2, -2, -3, -2, -3, -1, 1, -4, -3, -2, 11, 2, -3], # W
'Y': [-2, -2, -2, -3, -2, -1, -2, -3, 2, -1, -1, -2, -1, 3, -3, -2, -2, 2, 7, -1], # Y
'V': [0, -3, -3, -3, -1, -2, -2, -3, -3, 3, 1, -2, 1, -1, -2, -2, 0, -3, -1, 4], # V
'-': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], # -
}
pad_len = Max_length - len(seq)
seqs = []
for aa in seq:
seqs.append(blosum62[aa])
for _ in range(pad_len):
seqs.append(blosum62['-'])
return seqs
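# Note on BLOSUM62 above: it returns one 20-dimensional substitution row per residue and
# pads with the gap row '-' up to Max_length, i.e. a Max_length x 20 matrix per sequence.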
def Count(aaSet, sequence):
number = 0
for aa in sequence:
if aa in aaSet:
number = number + 1
cutoffNums = [1, math.floor(0.25 * number), math.floor(0.50 * number), math.floor(0.75 * number), number]
cutoffNums = [i if i >= 1 else 1 for i in cutoffNums]
code = []
for cutoff in cutoffNums:
myCount = 0
for i in range(len(sequence)):
if sequence[i] in aaSet:
myCount += 1
if myCount == cutoff:
code.append((i + 1) / len(sequence) * Max_length)
break
if myCount == 0:
code.append(0)
return code
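# Worked example for Count (descriptive only): with aaSet='KR' and sequence='KRKA',
# number=3 and cutoffNums=[1, 1, 1, 2, 3], so the returned positions are
# [0.25, 0.25, 0.25, 0.5, 0.75] scaled by Max_length.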
def CTDD(seq):
group1 = {
'hydrophobicity_PRAM900101': 'RKEDQN',
'hydrophobicity_ARGP820101': 'QSTNGDE',
'hydrophobicity_ZIMJ680101': 'QNGSWTDERA',
'hydrophobicity_PONP930101': 'KPDESNQT',
'hydrophobicity_CASG920101': 'KDEQPSRNTG',
'hydrophobicity_ENGD860101': 'RDKENQHYP',
'hydrophobicity_FASG890101': 'KERSQD',
'normwaalsvolume': 'GASTPDC',
'polarity': 'LIFWCMVY',
'polarizability': 'GASDT',
'charge': 'KR',
'secondarystruct': 'EALMQKRH',
'solventaccess': 'ALFCGIVW'
}
group2 = {
'hydrophobicity_PRAM900101': 'GASTPHY',
'hydrophobicity_ARGP820101': 'RAHCKMV',
'hydrophobicity_ZIMJ680101': 'HMCKV',
'hydrophobicity_PONP930101': 'GRHA',
'hydrophobicity_CASG920101': 'AHYMLV',
'hydrophobicity_ENGD860101': 'SGTAW',
'hydrophobicity_FASG890101': 'NTPG',
'normwaalsvolume': 'NVEQIL',
'polarity': 'PATGS',
'polarizability': 'CPNVEQIL',
'charge': 'ANCQGHILMFPSTWYV',
'secondarystruct': 'VIYCWFT',
'solventaccess': 'RKQEND'
}
group3 = {
'hydrophobicity_PRAM900101': 'CLVIMFW',
'hydrophobicity_ARGP820101': 'LYPFIW',
'hydrophobicity_ZIMJ680101': 'LPFYI',
'hydrophobicity_PONP930101': 'YMFWLCVI',
'hydrophobicity_CASG920101': 'FIWC',
'hydrophobicity_ENGD860101': 'CVLIMF',
'hydrophobicity_FASG890101': 'AYHWVMFLIC',
'normwaalsvolume': 'MHKFRYW',
'polarity': 'HQRKNED',
'polarizability': 'KMHFRYW',
'charge': 'DE',
'secondarystruct': 'GNPSD',
'solventaccess': 'MSPTHY'
}
groups = [group1, group2, group3]
property = (
'hydrophobicity_PRAM900101', 'hydrophobicity_ARGP820101', 'hydrophobicity_ZIMJ680101',
'hydrophobicity_PONP930101',
'hydrophobicity_CASG920101', 'hydrophobicity_ENGD860101', 'hydrophobicity_FASG890101', 'normwaalsvolume',
'polarity', 'polarizability', 'charge', 'secondarystruct', 'solventaccess')
encodings = []
code = []
for p in property:
code = code + Count(group1[p], seq) + Count(group2[p], seq) + Count(group3[p], seq)
encodings.append(code)
return encodings
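# Note on CTDD above: 13 properties x 3 groups x 5 distribution points per Count call
# gives 195 values per sequence.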
def DPC(seq):
AA = 'ACDEFGHIKLMNPQRSTVWY'
encodings = []
diPeptides = [aa1 + aa2 for aa1 in AA for aa2 in AA]
# header = ['#'] + diPeptides
# encodings.append(header)
AADict = {}
for i in range(len(AA)):
AADict[AA[i]] = i
# for i in fastas:
# name, sequence = i[0], re.sub('-', '', i[1])
code = []
tmpCode = [0] * 400
for j in range(len(seq) - 2 + 1):
tmpCode[AADict[seq[j]] * 20 + AADict[seq[j + 1]]] = tmpCode[AADict[seq[j]] * 20 + AADict[
seq[j + 1]]] + 1
if sum(tmpCode) != 0:
tmpCode = [i / sum(tmpCode) for i in tmpCode]
code = code + tmpCode
encodings.append(code)
return encodings
def AAC(seq):
AA = 'ACDEFGHIKLMNPQRSTVWY'
# AA = 'ARNDCQEGHILKMFPSTWYV'
encodings = []
# for i in fastas:
# name, sequence = i[0], re.sub('-', '', i[1])
count = Counter(seq)
for key in count:
count[key] = count[key] / len(seq)
code = []
for aa in AA:
code.append(count[aa])
encodings.append(code)
return encodings
def ZSCALE(seq):
zscale = {
'A': [0.24, -2.32, 0.60, -0.14, 1.30], # A
'C': [0.84, -1.67, 3.71, 0.18, -2.65], # C
'D': [3.98, 0.93, 1.93, -2.46, 0.75], # D
'E': [3.11, 0.26, -0.11, -0.34, -0.25], # E
'F': [-4.22, 1.94, 1.06, 0.54, -0.62], # F
'G': [2.05, -4.06, 0.36, -0.82, -0.38], # G
'H': [2.47, 1.95, 0.26, 3.90, 0.09], # H
'I': [-3.89, -1.73, -1.71, -0.84, 0.26], # I
'K': [2.29, 0.89, -2.49, 1.49, 0.31], # K
'L': [-4.28, -1.30, -1.49, -0.72, 0.84], # L
'M': [-2.85, -0.22, 0.47, 1.94, -0.98], # M
'N': [3.05, 1.62, 1.04, -1.15, 1.61], # N
'P': [-1.66, 0.27, 1.84, 0.70, 2.00], # P
'Q': [1.75, 0.50, -1.44, -1.34, 0.66], # Q
'R': [3.52, 2.50, -3.50, 1.99, -0.17], # R
'S': [2.39, -1.07, 1.15, -1.39, 0.67], # S
'T': [0.75, -2.18, -1.12, -1.46, -0.40], # T
'V': [-2.59, -2.64, -1.54, -0.85, -0.02], # V
'W': [-4.36, 3.94, 0.59, 3.44, -1.59], # W
'Y': [-2.54, 2.44, 0.43, 0.04, -1.47], # Y
'-': [0.00, 0.00, 0.00, 0.00, 0.00], # -
}
encodings = []
# header = ['#']
# for p in range(1, len(fastas[0][1]) + 1):
# for z in ('1', '2', '3', '4', '5'):
# header.append('Pos' + str(p) + '.ZSCALE' + z)
# encodings.append(header)
# for i in fastas:
# name, sequence = i[0], i[1]
code = []
for _ in range(Max_length - len(seq)):
code = code + zscale['-']
for aa in seq:
code = code + zscale[aa]
encodings.append(code)
return encodings
def TPC(seq):
AA = 'ACDEFGHIKLMNPQRSTVWY'
encodings = []
triPeptides = [aa1 + aa2 + aa3 for aa1 in AA for aa2 in AA for aa3 in AA]
AADict = {}
for i in range(len(AA)):
AADict[AA[i]] = i
# for i in fastas:
# name, sequence = i[0], re.sub('-', '', i[1])
code = []
tmpCode = [0] * 8000
for j in range(len(seq) - 3 + 1):
tmpCode[AADict[seq[j]] * 400 + AADict[seq[j + 1]] * 20 + AADict[seq[j + 2]]] = tmpCode[AADict[seq[j]] * 400 +
AADict[seq[j + 1]] * 20 +
AADict[seq[j + 2]]] + 1
if sum(tmpCode) != 0:
tmpCode = [i / sum(tmpCode) for i in tmpCode]
code = code + tmpCode
encodings.append(code)
return encodings
def DDE(seq):
AA = 'ACDEFGHIKLMNPQRSTVWY'
myCodons = {
'A': 4,
'C': 2,
'D': 2,
'E': 2,
'F': 2,
'G': 4,
'H': 2,
'I': 3,
'K': 2,
'L': 6,
'M': 1,
'N': 2,
'P': 4,
'Q': 2,
'R': 6,
'S': 6,
'T': 4,
'V': 4,
'W': 1,
'Y': 2
}
encodings = []
diPeptides = [aa1 + aa2 for aa1 in AA for aa2 in AA]
myTM = []
for pair in diPeptides:
myTM.append((myCodons[pair[0]] / 61) * (myCodons[pair[1]] / 61))
AADict = {}
for i in range(len(AA)):
AADict[AA[i]] = i
# for i in fastas:
# name, sequence = i[0], re.sub('-', '', i[1])
code = []
tmpCode = [0] * 400
for j in range(len(seq) - 2 + 1):
tmpCode[AADict[seq[j]] * 20 + AADict[seq[j + 1]]] = tmpCode[AADict[seq[j]] * 20 + AADict[
seq[j + 1]]] + 1
if sum(tmpCode) != 0:
tmpCode = [i / sum(tmpCode) for i in tmpCode]
myTV = []
for j in range(len(myTM)):
myTV.append(myTM[j] * (1 - myTM[j]) / (len(seq) - 1))
for j in range(len(tmpCode)):
tmpCode[j] = (tmpCode[j] - myTM[j]) / math.sqrt(myTV[j])
code = code + tmpCode
encodings.append(code)
return encodings
def CalculateKSCTriad(sequence, gap, features, AADict):
res = []
for g in range(gap + 1):
myDict = {}
for f in features:
myDict[f] = 0
for i in range(len(sequence)):
if i + g + 1 < len(sequence) and i + 2 * g + 2 < len(sequence):
fea = AADict[sequence[i]] + '.' + AADict[sequence[i + g + 1]] + '.' + AADict[
sequence[i + 2 * g + 2]]
myDict[fea] = myDict[fea] + 1
maxValue, minValue = max(myDict.values()), min(myDict.values())
for f in features:
res.append((myDict[f] - minValue) / maxValue)
return res
def CTriad(seq):
AAGroup = {
'g1': 'AGV',
'g2': 'ILFP',
'g3': 'YMTS',
'g4': 'HNQW',
'g5': 'RK',
'g6': 'DE',
'g7': 'C'
}
myGroups = sorted(AAGroup.keys())
AADict = {}
for g in myGroups:
for aa in AAGroup[g]:
AADict[aa] = g
features = [f1 + '.' + f2 + '.' + f3 for f1 in myGroups for f2 in myGroups for f3 in myGroups]
encodings = []
# header = ['#']
# for f in features:
# header.append(f)
# encodings.append(header)
# me, sequence = i[0], re.sub('-', '', i[1])
code = []
if len(seq) < 3:
print('Error: for "CTriad" encoding, the input fasta sequences should be greater than 3. \n\n')
return 0
code = code + CalculateKSCTriad(seq, 0, features, AADict)
encodings.append(code)
return encodings
def CalculateKSCTriad(sequence, gap, features, AADict):
res = []
for g in range(gap + 1):
myDict = {}
for f in features:
myDict[f] = 0
for i in range(len(sequence)):
if i + g + 1 < len(sequence) and i + 2 * g + 2 < len(sequence):
fea = AADict[sequence[i]] + '.' + AADict[sequence[i + g + 1]] + '.' + AADict[sequence[i + 2 * g + 2]]
myDict[fea] = myDict[fea] + 1
maxValue, minValue = max(myDict.values()), min(myDict.values())
for f in features:
res.append((myDict[f] - minValue) / maxValue)
return res
def KSCTriad(seq, gap=1):
AAGroup = {
'g1': 'AGV',
'g2': 'ILFP',
'g3': 'YMTS',
'g4': 'HNQW',
'g5': 'RK',
'g6': 'DE',
'g7': 'C'
}
myGroups = sorted(AAGroup.keys())
AADict = {}
for g in myGroups:
for aa in AAGroup[g]:
AADict[aa] = g
features = [f1 + '.' + f2 + '.' + f3 for f1 in myGroups for f2 in myGroups for f3 in myGroups]
encodings = []
code = []
if len(seq) < 2 * gap + 3:
print('Error: for "KSCTriad" encoding, the input fasta sequences should be greater than (2*gap+3). \n\n')
return 0
code = code + CalculateKSCTriad(seq, gap, features, AADict)
encodings.append(code)
return encodings
def GTPC(seq):
group = {
'alphaticr': 'GAVLMI',
'aromatic': 'FYW',
'postivecharger': 'KRH',
'negativecharger': 'DE',
'uncharger': 'STCPNQ'
}
groupKey = group.keys()
baseNum = len(groupKey)
triple = [g1 + '.' + g2 + '.' + g3 for g1 in groupKey for g2 in groupKey for g3 in groupKey]
index = {}
for key in groupKey:
for aa in group[key]:
index[aa] = key
encodings = []
code = []
myDict = {}
for t in triple:
myDict[t] = 0
sum = 0
for j in range(len(seq) - 3 + 1):
myDict[index[seq[j]] + '.' + index[seq[j + 1]] + '.' + index[seq[j + 2]]] = myDict[index[seq[j]] + '.' + index[
seq[j + 1]] + '.' + index[seq[j + 2]]] + 1
sum = sum + 1
if sum == 0:
for t in triple:
code.append(0)
else:
for t in triple:
code.append(myDict[t] / sum)
encodings.append(code)
return encodings
def generateGroupPairs(groupKey):
gPair = {}
for key1 in groupKey:
for key2 in groupKey:
gPair[key1 + '.' + key2] = 0
return gPair
def CKSAAGP(seq, gap=2):
if gap < 0:
print('Error: the gap should be equal or greater than zero' + '\n\n')
return 0
group = {
'alphaticr': 'GAVLMI',
'aromatic': 'FYW',
'postivecharger': 'KRH',
'negativecharger': 'DE',
'uncharger': 'STCPNQ'
}
AA = 'ARNDCQEGHILKMFPSTWYV'
groupKey = group.keys()
index = {}
for key in groupKey:
for aa in group[key]:
index[aa] = key
gPairIndex = []
for key1 in groupKey:
for key2 in groupKey:
gPairIndex.append(key1 + '.' + key2)
encodings = []
code = []
for g in range(gap + 1):
gPair = generateGroupPairs(groupKey)
sum = 0
for p1 in range(len(seq)):
p2 = p1 + g + 1
if p2 < len(seq) and seq[p1] in AA and seq[p2] in AA:
gPair[index[seq[p1]] + '.' + index[seq[p2]]] = gPair[index[seq[p1]] + '.' + index[
seq[p2]]] + 1
sum = sum + 1
if sum == 0:
for gp in gPairIndex:
code.append(0)
else:
for gp in gPairIndex:
code.append(gPair[gp] / sum)
encodings.append(code)
return encodings
def GAAC(seq):
group = {
'alphatic': 'GAVLMI',
'aromatic': 'FYW',
'postivecharge': 'KRH',
'negativecharge': 'DE',
'uncharge': 'STCPNQ'
}
groupKey = group.keys()
encodings = []
code = []
count = Counter(seq)
myDict = {}
for key in groupKey:
for aa in group[key]:
myDict[key] = myDict.get(key, 0) + count[aa]
for key in groupKey:
code.append(myDict[key] / len(seq))
encodings.append(code)
return encodings
def GDPC(seq):
group = {
'alphaticr': 'GAVLMI',
'aromatic': 'FYW',
'postivecharger': 'KRH',
'negativecharger': 'DE',
'uncharger': 'STCPNQ'
}
groupKey = group.keys()
baseNum = len(groupKey)
dipeptide = [g1 + '.' + g2 for g1 in groupKey for g2 in groupKey]
index = {}
for key in groupKey:
for aa in group[key]:
index[aa] = key
encodings = []
code = []
myDict = {}
for t in dipeptide:
myDict[t] = 0
sum = 0
for j in range(len(seq) - 2 + 1):
myDict[index[seq[j]] + '.' + index[seq[j + 1]]] = myDict[index[seq[j]] + '.' + index[
seq[j + 1]]] + 1
sum = sum + 1
if sum == 0:
for t in dipeptide:
code.append(0)
else:
for t in dipeptide:
code.append(myDict[t] / sum)
encodings.append(code)
return encodings
def AAINDEX(seq):
temp = "-" * (Max_length - len(seq))
seq += temp
AA = 'ARNDCQEGHILKMFPSTWYV'
fileAAindex = "data\\AAindex1.txt"
with open(fileAAindex) as f:
records = f.readlines()[1:]
AAindex = []
AAindexName = []
for i in records:
AAindex.append(i.rstrip().split()[1:] if i.rstrip() != '' else None)
AAindexName.append(i.rstrip().split()[0] if i.rstrip() != '' else None)
index = {}
for i in range(len(AA)):
index[AA[i]] = i
encodings = []
code = []
for aa in seq:
if aa == '-':
for j in AAindex:
code.append(0)
continue
for j in AAindex:
code.append(j[index[aa]])
encodings.append(code)
return encodings
def CTDT(seq):
group1 = {
'hydrophobicity_PRAM900101': 'RKEDQN',
'hydrophobicity_ARGP820101': 'QSTNGDE',
'hydrophobicity_ZIMJ680101': 'QNGSWTDERA',
'hydrophobicity_PONP930101': 'KPDESNQT',
'hydrophobicity_CASG920101': 'KDEQPSRNTG',
'hydrophobicity_ENGD860101': 'RDKENQHYP',
'hydrophobicity_FASG890101': 'KERSQD',
'normwaalsvolume': 'GASTPDC',
'polarity': 'LIFWCMVY',
'polarizability': 'GASDT',
'charge': 'KR',
'secondarystruct': 'EALMQKRH',
'solventaccess': 'ALFCGIVW'
}
group2 = {
'hydrophobicity_PRAM900101': 'GASTPHY',
'hydrophobicity_ARGP820101': 'RAHCKMV',
'hydrophobicity_ZIMJ680101': 'HMCKV',
'hydrophobicity_PONP930101': 'GRHA',
'hydrophobicity_CASG920101': 'AHYMLV',
'hydrophobicity_ENGD860101': 'SGTAW',
'hydrophobicity_FASG890101': 'NTPG',
'normwaalsvolume': 'NVEQIL',
'polarity': 'PATGS',
'polarizability': 'CPNVEQIL',
'charge': 'ANCQGHILMFPSTWYV',
'secondarystruct': 'VIYCWFT',
'solventaccess': 'RKQEND'
}
group3 = {
'hydrophobicity_PRAM900101': 'CLVIMFW',
'hydrophobicity_ARGP820101': 'LYPFIW',
'hydrophobicity_ZIMJ680101': 'LPFYI',
'hydrophobicity_PONP930101': 'YMFWLCVI',
'hydrophobicity_CASG920101': 'FIWC',
'hydrophobicity_ENGD860101': 'CVLIMF',
'hydrophobicity_FASG890101': 'AYHWVMFLIC',
'normwaalsvolume': 'MHKFRYW',
'polarity': 'HQRKNED',
'polarizability': 'KMHFRYW',
'charge': 'DE',
'secondarystruct': 'GNPSD',
'solventaccess': 'MSPTHY'
}
groups = [group1, group2, group3]
property = (
'hydrophobicity_PRAM900101', 'hydrophobicity_ARGP820101', 'hydrophobicity_ZIMJ680101',
'hydrophobicity_PONP930101',
'hydrophobicity_CASG920101', 'hydrophobicity_ENGD860101', 'hydrophobicity_FASG890101', 'normwaalsvolume',
'polarity', 'polarizability', 'charge', 'secondarystruct', 'solventaccess')
encodings = []
code = []
aaPair = [seq[j:j + 2] for j in range(len(seq) - 1)]
for p in property:
c1221, c1331, c2332 = 0, 0, 0
for pair in aaPair:
if (pair[0] in group1[p] and pair[1] in group2[p]) or (pair[0] in group2[p] and pair[1] in group1[p]):
c1221 = c1221 + 1
continue
if (pair[0] in group1[p] and pair[1] in group3[p]) or (pair[0] in group3[p] and pair[1] in group1[p]):
c1331 = c1331 + 1
continue
if (pair[0] in group2[p] and pair[1] in group3[p]) or (pair[0] in group3[p] and pair[1] in group2[p]):
c2332 = c2332 + 1
code = code + [c1221 / len(aaPair), c1331 / len(aaPair), c2332 / len(aaPair)]
encodings.append(code)
return encodings
def Geary(seq, props=['CIDH920105', 'BHAR880101', 'CHAM820101', 'CHAM820102',
'CHOC760101', 'BIGC670101', 'CHAM810101', 'DAYM780201'],
nlag=2):
AA = 'ARNDCQEGHILKMFPSTWYV'
fileAAidx = "data\\AAidx.txt"
with open(fileAAidx) as f:
records = f.readlines()[1:]
myDict = {}
for i in records:
array = i.rstrip().split('\t')
myDict[array[0]] = array[1:]
AAidx = []
AAidxName = []
for i in props:
if i in myDict:
AAidx.append(myDict[i])
AAidxName.append(i)
else:
print('"' + i + '" properties not exist.')
return None
AAidx1 = np.array([float(j) for i in AAidx for j in i])
AAidx = AAidx1.reshape((len(AAidx), 20))
propMean = np.mean(AAidx, axis=1)
propStd = np.std(AAidx, axis=1)
for i in range(len(AAidx)):
for j in range(len(AAidx[i])):
AAidx[i][j] = (AAidx[i][j] - propMean[i]) / propStd[i]
index = {}
for i in range(len(AA)):
index[AA[i]] = i
encodings = []
code = []
N = len(seq)
for prop in range(len(props)):
xmean = sum([AAidx[prop][index[aa]] for aa in seq]) / N
for n in range(1, nlag + 1):
if len(seq) > nlag:
# if key is '-', then the value is 0
rn = (N - 1) / (2 * (N - n)) * ((sum(
[(AAidx[prop][index.get(seq[j], 0)] - AAidx[prop][index.get(seq[j + n], 0)]) ** 2 for
j in range(len(seq) - n)])) / (sum(
[(AAidx[prop][index.get(seq[j], 0)] - xmean) ** 2 for j in range(len(seq))])))
else:
rn = 'NA'
code.append(rn)
encodings.append(code)
return encodings
def CKSAAP(seq, gap=2, **kw):
if gap < 0:
print('Error: the gap should be equal or greater than zero' + '\n\n')
return 0
AA = 'ACDEFGHIKLMNPQRSTVWY'
encodings = []
aaPairs = []
for aa1 in AA:
for aa2 in AA:
aaPairs.append(aa1 + aa2)
code = []
for g in range(gap + 1):
myDict = {}
for pair in aaPairs:
myDict[pair] = 0
sum = 0
for index1 in range(len(seq)):
index2 = index1 + g + 1
if index1 < len(seq) and index2 < len(seq) and seq[index1] in AA and seq[
index2] in AA:
myDict[seq[index1] + seq[index2]] = myDict[seq[index1] + seq[index2]] + 1
sum = sum + 1
for pair in aaPairs:
code.append(myDict[pair] / sum)
encodings.append(code)
return encodings
def Rvalue(aa1, aa2, AADict, Matrix):
return sum([(Matrix[i][AADict[aa1]] - Matrix[i][AADict[aa2]]) ** 2 for i in range(len(Matrix))]) / len(Matrix)
def PAAC(seq, lambdaValue=3, w=0.05):
dataFile = 'data\PAAC.txt'
with open(dataFile) as f:
records = f.readlines()
AA = ''.join(records[0].rstrip().split()[1:])
AADict = {}
for i in range(len(AA)):
AADict[AA[i]] = i
AAProperty = []
AAPropertyNames = []
for i in range(1, len(records)):
array = records[i].rstrip().split() if records[i].rstrip() != '' else None
AAProperty.append([float(j) for j in array[1:]])
AAPropertyNames.append(array[0])
AAProperty1 = []
for i in AAProperty:
meanI = sum(i) / 20
fenmu = math.sqrt(sum([(j - meanI) ** 2 for j in i]) / 20)
AAProperty1.append([(j - meanI) / fenmu for j in i])
encodings = []
code = []
theta = []
for n in range(1, lambdaValue + 1):
theta.append(
sum([Rvalue(seq[j], seq[j + n], AADict, AAProperty1) for j in range(len(seq) - n)]) / (
len(seq) - n))
myDict = {}
for aa in AA:
myDict[aa] = seq.count(aa)
code = code + [myDict[aa] / (1 + w * sum(theta)) for aa in AA]
code = code + [(w * j) / (1 + w * sum(theta)) for j in theta]
encodings.append(code)
return encodings
# AFC-T, AFC-CP
def Feature(f):
amino_acids = "XACDEFGHIKLMNPQRSTVWY"
amino_acids_dict = {}
seqs = []
seqs_blosum62 = []
seqs_dde = []
seqs_z = []
seqs_dpc = []
seqs_aac = []
seqs_ctdd = []
lable_seqs = []
work2vec = []
seqs_sr = []
seqs_ksctriad = []
seqs_gtpc = []
seqs_cksaagp = []
seqs_gaac = []
seqs_gdpc = []
seqs_aaindex = []
seqs_ctdt = []
seqs_geary = []
seqs_cksaap = []
seqs_ctrial = []
seqs_paac = []
for n, s in enumerate(amino_acids):
amino_acids_dict[s] = n
#new_antifu = Word2Vec.load('fa_model_All.bin')
for n, s in enumerate(SeqIO.parse(f, "fasta")):
seq_blosum62 = BLOSUM62(s.seq)
seq_ksctriad = KSCTriad(s.seq)
seq_dde = DDE(s.seq)
seq_z = ZSCALE(s.seq)
seq_aac = AAC(s.seq)
seq_dpc = DPC(s.seq)
seq_ctdd = CTDD(s.seq)
seq_ctrial = CTriad(s.seq)
seq_gtpc = GTPC(s.seq)
seq_cksaagp = CKSAAGP(s.seq)
seq_gaac = GAAC(s.seq)
seq_gdpc = GDPC(s.seq)
seq_ctdt = CTDT(s.seq)
seq_geary = Geary(s.seq)
seq_cksaap = CKSAAP(s.seq)
seq_aaindex = AAINDEX(s.seq)
seq_paac = PAAC(s.seq)
seqs_dde.append(seq_dde)
seqs_z.append(seq_z)
seqs_aac.append(seq_aac)
seqs_dpc.append(seq_dpc)
seqs_ctdd.append(seq_ctdd)
seqs_blosum62.append(seq_blosum62)
seqs_ctrial.append(seq_ctrial)
seqs_ksctriad.append(seq_ksctriad)
seqs_gtpc.append(seq_gtpc)
seqs_cksaagp.append(seq_cksaagp)
seqs_gaac.append(seq_gaac)
seqs_gdpc.append(seq_gdpc)
seqs_ctdt.append(seq_ctdt)
seqs_geary.append(seq_geary)
seqs_cksaap.append(seq_cksaap)
seqs_aaindex.append(seq_aaindex)
seqs_paac.append(seq_paac)
temp_pad = []
temp_pad1 = []
temps = []
for i in range(20):
temp_pad1.append(0)
for i in range(Max_length - len(s)):
temps.append(temp_pad1)
for i in range(Max_length - len(str(s.seq))):
temp_pad.append(0)
train_seq = [amino_acids_dict[a.upper()] for a in str(s.seq).upper()] + temp_pad
seqs_sr.append(train_seq)
#aux_p3 = [new_antifu.wv[a] if a in "ACDEFGHIKLMNPQRSTVWY" else [0 for i in range(20)] for a in
#str(s.seq).upper()] + temps
#work2vec.append(aux_p3)
if s.id[-1] == "1":
#print(s.id)
lable_seqs.append([1])
else:
#print(s.id)
lable_seqs.append([0])
return seqs_blosum62, lable_seqs, work2vec, seqs_sr, seqs_dde, seqs_z, seqs_aac, seqs_dpc, seqs_ctdd, seqs_ctrial, seqs_ksctriad, seqs_gtpc, seqs_cksaagp, seqs_gaac, seqs_gdpc, seqs_ctdt, seqs_geary, seqs_cksaap, seqs_aaindex, seqs_paac
# AFC-C based on main dataset
def Feature1(f):
amino_acids = "XACDEFGHIKLMNPQRSTVWY"
amino_acids_dict = {}
seqs = []
seqs_blosum62 = []
seqs_dde = []
seqs_z = []
seqs_dpc = []
seqs_aac = []
seqs_ctdd = []
lable_seqs = []
work2vec = []
seqs_sr = []
seqs_ksctriad = []
seqs_gtpc = []
seqs_cksaagp = []
seqs_gaac = []
seqs_gdpc = []
seqs_aaindex = []
seqs_ctdt = []
seqs_geary = []
seqs_cksaap = []
seqs_ctrial = []
seqs_paac = []
for n, s in enumerate(amino_acids):
amino_acids_dict[s] = n
#new_antifu = Word2Vec.load('D:\E下载\Dataset\Dataset\\fa_model_All.bin')
for n, s in enumerate(SeqIO.parse(f, "fasta")):
seq_blosum62 = BLOSUM62(s.seq)
#seq_ksctriad = KSCTriad(s.seq)
seq_dde = DDE(s.seq)
seq_z = ZSCALE(s.seq)
seq_aac = AAC(s.seq)
seq_dpc = DPC(s.seq)
seq_ctdd = CTDD(s.seq)
#seq_ctrial = CTriad(s.seq)
seq_gtpc = GTPC(s.seq)
seq_cksaagp = CKSAAGP(s.seq)
seq_gaac = GAAC(s.seq)
seq_gdpc = GDPC(s.seq)
seq_ctdt = CTDT(s.seq)
seq_geary = Geary(s.seq)
#seq_cksaap = CKSAAP(s.seq)
seq_aaindex = AAINDEX(s.seq)
#seq_paac = PAAC(s.seq)
seqs_dde.append(seq_dde)
seqs_z.append(seq_z)
seqs_aac.append(seq_aac)
seqs_dpc.append(seq_dpc)
seqs_ctdd.append(seq_ctdd)
seqs_blosum62.append(seq_blosum62)
#seqs_ctrial.append(seq_ctrial)
#seqs_ksctriad.append(seq_ksctriad)
seqs_gtpc.append(seq_gtpc)
seqs_cksaagp.append(seq_cksaagp)
seqs_gaac.append(seq_gaac)
seqs_gdpc.append(seq_gdpc)
seqs_ctdt.append(seq_ctdt)
seqs_geary.append(seq_geary)
#seqs_cksaap.append(seq_cksaap)
seqs_aaindex.append(seq_aaindex)
#seqs_paac.append(seq_paac)
temp_pad = []
temp_pad1 = []
temps = []
for i in range(20):
temp_pad1.append(0)
for i in range(Max_length - len(s)):
temps.append(temp_pad1)
for i in range(Max_length - len(str(s.seq))):
temp_pad.append(0)
train_seq = [amino_acids_dict[a.upper()] for a in str(s.seq).upper()] + temp_pad
seqs_sr.append(train_seq)
#aux_p3 = [new_antifu.wv[a] if a in "ACDEFGHIKLMNPQRSTVWY" else [0 for i in range(20)] for a in
#str(s.seq).upper()] + temps
#work2vec.append(aux_p3)
if s.id[-1] == "1":
lable_seqs.append([1])
else:
lable_seqs.append([0])
return seqs_blosum62, lable_seqs, work2vec, seqs_sr, seqs_dde, seqs_z, seqs_aac, seqs_dpc, seqs_ctdd, seqs_ctrial, seqs_ksctriad, seqs_gtpc, seqs_cksaagp, seqs_gaac, seqs_gdpc, seqs_ctdt, seqs_geary, seqs_cksaap, seqs_aaindex, seqs_paac
# AFC-C based on alternate dataset
def Feature2(f):
amino_acids = "XACDEFGHIKLMNPQRSTVWY"
amino_acids_dict = {}
seqs = []
seqs_blosum62 = []
seqs_dde = []
seqs_z = []
seqs_dpc = []
seqs_aac = []
seqs_ctdd = []
lable_seqs = []
work2vec = []
seqs_sr = []
seqs_ksctriad = []
seqs_gtpc = []
seqs_cksaagp = []
seqs_gaac = []
seqs_gdpc = []
seqs_aaindex = []
seqs_ctdt = []
seqs_geary = []
seqs_cksaap = []
seqs_ctrial = []
seqs_paac = []
for n, s in enumerate(amino_acids):
amino_acids_dict[s] = n
#new_antifu = Word2Vec.load('D:\E下载\Dataset\Dataset\\fa_model_All.bin')
for n, s in enumerate(SeqIO.parse(f, "fasta")):
seq_blosum62 = BLOSUM62(s.seq)
#seq_ksctriad = KSCTriad(s.seq)
seq_dde = DDE(s.seq)
seq_z = ZSCALE(s.seq)
seq_aac = AAC(s.seq)
seq_dpc = DPC(s.seq)
seq_ctdd = CTDD(s.seq)
seq_ctrial = CTriad(s.seq)
seq_gtpc = GTPC(s.seq)
seq_cksaagp = CKSAAGP(s.seq)
seq_gaac = GAAC(s.seq)
seq_gdpc = GDPC(s.seq)
seq_ctdt = CTDT(s.seq)
seq_geary = Geary(s.seq)
#seq_cksaap = CKSAAP(s.seq)
seq_aaindex = AAINDEX(s.seq)
#seq_paac = PAAC(s.seq)
seqs_dde.append(seq_dde)
seqs_z.append(seq_z)
seqs_aac.append(seq_aac)
seqs_dpc.append(seq_dpc)
seqs_ctdd.append(seq_ctdd)
seqs_blosum62.append(seq_blosum62)
seqs_ctrial.append(seq_ctrial)
#seqs_ksctriad.append(seq_ksctriad)
seqs_gtpc.append(seq_gtpc)
seqs_cksaagp.append(seq_cksaagp)
seqs_gaac.append(seq_gaac)
seqs_gdpc.append(seq_gdpc)
seqs_ctdt.append(seq_ctdt)
seqs_geary.append(seq_geary)
#seqs_cksaap.append(seq_cksaap)
seqs_aaindex.append(seq_aaindex)
#seqs_paac.append(seq_paac)
temp_pad = []
temp_pad1 = []
temps = []
for i in range(20):
temp_pad1.append(0)
for i in range(Max_length - len(s)):
temps.append(temp_pad1)
for i in range(Max_length - len(str(s.seq))):
temp_pad.append(0)
train_seq = [amino_acids_dict[a.upper()] for a in str(s.seq).upper()] + temp_pad
seqs_sr.append(train_seq)
#aux_p3 = [new_antifu.wv[a] if a in "ACDEFGHIKLMNPQRSTVWY" else [0 for i in range(20)] for a in
#str(s.seq).upper()] + temps
#work2vec.append(aux_p3)
if s.id[-1] == "1":
lable_seqs.append([1])
else:
lable_seqs.append([0])
return seqs_blosum62, lable_seqs, work2vec, seqs_sr, seqs_dde, seqs_z, seqs_aac, seqs_dpc, seqs_ctdd, seqs_ctrial, seqs_ksctriad, seqs_gtpc, seqs_cksaagp, seqs_gaac, seqs_gdpc, seqs_ctdt, seqs_geary, seqs_cksaap, seqs_aaindex, seqs_paac
| 31.040214
| 241
| 0.501324
| 4,472
| 34,734
| 3.799195
| 0.094812
| 0.014126
| 0.019776
| 0.015539
| 0.724662
| 0.69741
| 0.67575
| 0.645968
| 0.623131
| 0.601883
| 0
| 0.071952
| 0.339379
| 34,734
| 1,118
| 242
| 31.067979
| 0.668483
| 0.055479
| 0
| 0.668177
| 0
| 0.002265
| 0.115224
| 0.046324
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03171
| false
| 0
| 0.006795
| 0.001133
| 0.074745
| 0.005663
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
08f98d32f073c8a759a51d5a1b5fc9a27ec1c07c
| 1,927
|
py
|
Python
|
python/http_request.py
|
MrVallentin/http_request
|
b21cb23ead1e3bc7176f09804f9cc9287b9f0168
|
[
"MIT"
] | null | null | null |
python/http_request.py
|
MrVallentin/http_request
|
b21cb23ead1e3bc7176f09804f9cc9287b9f0168
|
[
"MIT"
] | null | null | null |
python/http_request.py
|
MrVallentin/http_request
|
b21cb23ead1e3bc7176f09804f9cc9287b9f0168
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Author: Christian Vallentin <[email protected]>
# Website: http://vallentinsource.com
# Repository: https://github.com/MrVallentin/http_request
#
# Date Created: February 28, 2016
# Last Modified: February 29, 2016
#
# Developed and tested using Python 3.5.1
import http.client, urllib.parse
def http_request(url, data = None, method = "GET", headers = {}):
parsed = urllib.parse.urlparse(url)
scheme, netloc, path = parsed.scheme, parsed.netloc, parsed.path
if not method:
method = "GET"
method = method.upper()
if not headers:
headers = {}
if data:
data = urllib.parse.urlencode(data)
#data = data.encode("utf-8")
if method == "GET":
if data:
path += "?" + data
data = None
if not headers:
headers = {}
if data:
headers["Content-Length"] = len(data)
headers["Content-Type"] = "application/x-www-form-urlencoded"
conn = None
if scheme and scheme == "https":
conn = http.client.HTTPSConnection(netloc)
else:
conn = http.client.HTTPConnection(netloc)
conn.request(method, path, data, headers)
res = conn.getresponse()
res_status, res_reason = res.status, res.reason
res_body = res.read()
res_headers = res.getheaders()
conn.close()
res_body = res_body.decode("utf-8")
return res_body, res_status, res_reason, res_headers
def http_head(url, data = None, headers = None):
return http_request(url, data, "HEAD", headers)
def http_get(url, data = None, headers = None):
return http_request(url, data, "GET", headers)
def http_post(url, data = None, headers = None):
return http_request(url, data, "POST", headers)
def http_delete(url, data = None, headers = None):
return http_request(url, data, "DELETE", headers)
def http_put(url, data = None, headers = None):
return http_request(url, data, "PUT", headers)
def http_patch(url, data = None, headers = None):
return http_request(url, data, "PATCH", headers)
| 23.790123
| 65
| 0.694862
| 270
| 1,927
| 4.87037
| 0.307407
| 0.069202
| 0.074525
| 0.095817
| 0.293536
| 0.247909
| 0.209886
| 0.209886
| 0.209886
| 0.209886
| 0
| 0.010592
| 0.167099
| 1,927
| 80
| 66
| 24.0875
| 0.808723
| 0.155682
| 0
| 0.155556
| 0
| 0
| 0.064317
| 0.020408
| 0
| 0
| 0
| 0
| 0
| 1
| 0.155556
| false
| 0
| 0.022222
| 0.133333
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
3e9679307c3b46578c6f09f3d9606e2140ef17b5
| 1,253
|
py
|
Python
|
sandbox/images/reseau_serveur_utile.py
|
chiesax/sandbox
|
3b628e0068c6f7116c3a98d481299158a8bf5de3
|
[
"MIT"
] | null | null | null |
sandbox/images/reseau_serveur_utile.py
|
chiesax/sandbox
|
3b628e0068c6f7116c3a98d481299158a8bf5de3
|
[
"MIT"
] | 1
|
2015-12-29T09:38:21.000Z
|
2015-12-30T16:16:19.000Z
|
sandbox/images/reseau_serveur_utile.py
|
chiesax/sandbox
|
3b628e0068c6f7116c3a98d481299158a8bf5de3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.4
# -*- coding: utf-8 -*-
"""
https://openclassrooms.com/courses/apprenez-a-programmer-en-python/le-reseau
AT THE END THE TERMINAL QUITS PYTHON:
SO THIS PROGRAM DOES NOT WORK!!
"""
# The two preceding lines would only be needed if I made this file
# directly executable
import socket
# Build our socket: THE SERVER
# socket.AF_INET : the address family, here these are Internet addresses ;
#
# socket.SOCK_STREAM : the socket type, SOCK_STREAM for the TCP protocol.
connexion_principale = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
print("connexion_principale :\n",connexion_principale)
# Bind the socket
# the host name will be empty and the port can be whichever you want, between 1024 and 65535.
connexion_principale.bind(("",12800)) # the single argument of bind is a tuple !!
print("bind :\n",connexion_principale.bind)
# Make our socket listen
connexion_principale.listen(5)
print("listen :\n",connexion_principale.listen)
print("salute")
# +++ So there are two ports in our story +++
#
# but the one the client uses
# to open its connection is not going to interest us.
connexion_avec_client,infos_connexion = connexion_principale.accept()
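# A minimal sketch of the client side referred to in the comments above (an assumption,
# not part of this file): it connects to the same port so that accept() above returns.
#
# import socket
# connexion_avec_serveur = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# connexion_avec_serveur.connect(("localhost", 12800))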
| 27.844444
| 89
| 0.739824
| 180
| 1,253
| 5.061111
| 0.611111
| 0.16685
| 0.052689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017143
| 0.162011
| 1,253
| 44
| 90
| 28.477273
| 0.850476
| 0.646449
| 0
| 0
| 0
| 0
| 0.115108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.444444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
3e9f295d7038dec0577aca1bae6bbad75edf434c
| 2,083
|
py
|
Python
|
src/hard/multi_search.py
|
JadielTeofilo/General-Algorithms
|
dfcf86c6ecd727573079f8971187c47bdb7a37bb
|
[
"MIT"
] | null | null | null |
src/hard/multi_search.py
|
JadielTeofilo/General-Algorithms
|
dfcf86c6ecd727573079f8971187c47bdb7a37bb
|
[
"MIT"
] | null | null | null |
src/hard/multi_search.py
|
JadielTeofilo/General-Algorithms
|
dfcf86c6ecd727573079f8971187c47bdb7a37bb
|
[
"MIT"
] | null | null | null |
"""
17.17 Multi Search: Given a string b and an array of smaller strings T, design a method to search b for
each small string in T.
In - text: str, words: List[str]
Out - List[str]
lgget`s go to the party tonight?
['go', 'test', 'jump']
return ['go']
O(k^2 + n*t) time complexity, where k is the size of text, n is the size of words, and t is the size of the biggest word
O(k^2) space complexity
"""
from typing import Dict, Any, List
TrieNode = Dict[str, Any]
class Trie:
def __init__(self) -> None:
self.trie: TrieNode = {'children': {}}
def insert(self, word: str) -> None:
if not word:
return
self._insert_helper(word, self.trie)
def _insert_helper(self, word: str, node: TrieNode) -> None:
if not word:
return
target_char: str = word[0]
if target_char not in node['children']:
node['children'][target_char] = {'children': {}}
return self._insert_helper(
word[1:],
node['children'][target_char],
)
def search(self, word: str) -> bool:
if not word:
raise ValueError('Empty input')
return self._search_helper(word, self.trie)
def _search_helper(self, word: str, node: TrieNode) -> bool:
if not word:
return True
target_char: str = word[0]
if target_char not in node['children']:
return False
return self._search_helper(
word[1:],
node['children'][target_char],
)
def multi_search(text: str, words: List[str]) -> List[str]:
# TODO validate input
trie: Trie = build_trie(text)
result: List[str] = []
for word in words:
if trie.search(word):
result.append(word)
return result
def build_trie(text: str) -> Trie:
"""
Inserts all possible substrings on the trie
"""
trie: Trie = Trie()
for i in range(len(text)):
trie.insert(text[i:])
return trie
print(multi_search('lgget`s go to the party tonight?', ['go', 'test', 'jump']))
| 24.797619
| 120
| 0.586174
| 291
| 2,083
| 4.103093
| 0.309278
| 0.058626
| 0.036851
| 0.027638
| 0.393635
| 0.246231
| 0.197655
| 0.197655
| 0.137353
| 0.137353
| 0
| 0.006775
| 0.291407
| 2,083
| 83
| 121
| 25.096386
| 0.802168
| 0.221795
| 0
| 0.304348
| 0
| 0
| 0.068424
| 0
| 0
| 0
| 0
| 0.012048
| 0
| 1
| 0.152174
| false
| 0
| 0.021739
| 0
| 0.391304
| 0.021739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3ea75b500ba52b9dd243fdb83d052c734dde9d13
| 477
|
py
|
Python
|
invenio_oarepo_oai_pmh_harvester/rules/uk/creator.py
|
Semtexcz/invenio-oarepo-oai-pmh-harvester
|
2866c7d7355f6885b4f443ee1e82baa24502b36e
|
[
"MIT"
] | null | null | null |
invenio_oarepo_oai_pmh_harvester/rules/uk/creator.py
|
Semtexcz/invenio-oarepo-oai-pmh-harvester
|
2866c7d7355f6885b4f443ee1e82baa24502b36e
|
[
"MIT"
] | null | null | null |
invenio_oarepo_oai_pmh_harvester/rules/uk/creator.py
|
Semtexcz/invenio-oarepo-oai-pmh-harvester
|
2866c7d7355f6885b4f443ee1e82baa24502b36e
|
[
"MIT"
] | null | null | null |
from invenio_oarepo_oai_pmh_harvester.register import Decorators
from invenio_oarepo_oai_pmh_harvester.rules.utils import iter_array
from invenio_oarepo_oai_pmh_harvester.transformer import OAITransformer
@Decorators.rule("xoai")
@Decorators.pre_rule("/dc/creator")
def transform_creator(paths, el, results, phase, **kwargs):
results[-1]["creator"] = [
{
"name": x
} for x in iter_array(el["value"])
]
return OAITransformer.PROCESSED
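# Illustrative example (assuming iter_array yields each element of the xoai "value" array):
# with el = {"value": ["Doe, John", "Roe, Jane"]} the rule sets
# results[-1]["creator"] = [{"name": "Doe, John"}, {"name": "Roe, Jane"}].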
| 29.8125
| 71
| 0.735849
| 60
| 477
| 5.583333
| 0.583333
| 0.098507
| 0.152239
| 0.179104
| 0.286567
| 0.286567
| 0
| 0
| 0
| 0
| 0
| 0.0025
| 0.161426
| 477
| 15
| 72
| 31.8
| 0.835
| 0
| 0
| 0
| 0
| 0
| 0.06499
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.25
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3eaea2f364b0232cd2155f284fa3e784667552cf
| 321
|
py
|
Python
|
card_collector_db/urls.py
|
tyhunt99/card-collector-db
|
932bd829eb46f9492e6a25326140823629161bab
|
[
"MIT"
] | null | null | null |
card_collector_db/urls.py
|
tyhunt99/card-collector-db
|
932bd829eb46f9492e6a25326140823629161bab
|
[
"MIT"
] | 4
|
2020-06-05T20:53:52.000Z
|
2022-02-10T08:32:51.000Z
|
card_collector_db/urls.py
|
tyhunt99/card-collector-db
|
932bd829eb46f9492e6a25326140823629161bab
|
[
"MIT"
] | null | null | null |
'''
cardcollectordb URL Configuration
'''
from django.contrib import admin
from django.urls import include, path
from account.views import SignUp
urlpatterns = [
path('admin/', admin.site.urls),
path('account/', include('account.urls'), name='account'),
path('signup/', SignUp.as_view(), name='signup'),
]
| 21.4
| 62
| 0.697819
| 39
| 321
| 5.717949
| 0.487179
| 0.089686
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140187
| 321
| 14
| 63
| 22.928571
| 0.807971
| 0.102804
| 0
| 0
| 0
| 0
| 0.164286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
3ebef9e4fbaf39a788a4ad55fc4f3d9f0f5242a4
| 3,659
|
py
|
Python
|
site-packages/cinderclient/tests/unit/v2/test_cgsnapshots.py
|
hariza17/freezer_libraries
|
e0bd890eba5e7438976fb3b4d66c41c128bab790
|
[
"PSF-2.0"
] | null | null | null |
site-packages/cinderclient/tests/unit/v2/test_cgsnapshots.py
|
hariza17/freezer_libraries
|
e0bd890eba5e7438976fb3b4d66c41c128bab790
|
[
"PSF-2.0"
] | 1
|
2018-09-10T23:44:02.000Z
|
2018-09-12T16:28:07.000Z
|
site-packages/cinderclient/tests/unit/v2/test_cgsnapshots.py
|
hariza17/freezer_libraries
|
e0bd890eba5e7438976fb3b4d66c41c128bab790
|
[
"PSF-2.0"
] | 2
|
2018-09-07T23:17:23.000Z
|
2019-01-11T16:10:08.000Z
|
# Copyright (C) 2012 - 2014 EMC Corporation.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinderclient.tests.unit import utils
from cinderclient.tests.unit.v2 import fakes
cs = fakes.FakeClient()
class cgsnapshotsTest(utils.TestCase):
def test_delete_cgsnapshot(self):
v = cs.cgsnapshots.list()[0]
vol = v.delete()
self._assert_request_id(vol)
cs.assert_called('DELETE', '/cgsnapshots/1234')
vol = cs.cgsnapshots.delete('1234')
cs.assert_called('DELETE', '/cgsnapshots/1234')
self._assert_request_id(vol)
vol = cs.cgsnapshots.delete(v)
cs.assert_called('DELETE', '/cgsnapshots/1234')
self._assert_request_id(vol)
def test_create_cgsnapshot(self):
vol = cs.cgsnapshots.create('cgsnap')
cs.assert_called('POST', '/cgsnapshots')
self._assert_request_id(vol)
def test_create_cgsnapshot_with_cg_id(self):
vol = cs.cgsnapshots.create('1234')
expected = {'cgsnapshot': {'status': 'creating',
'description': None,
'user_id': None,
'name': None,
'consistencygroup_id': '1234',
'project_id': None}}
cs.assert_called('POST', '/cgsnapshots', body=expected)
self._assert_request_id(vol)
def test_update_cgsnapshot(self):
v = cs.cgsnapshots.list()[0]
expected = {'cgsnapshot': {'name': 'cgs2'}}
vol = v.update(name='cgs2')
cs.assert_called('PUT', '/cgsnapshots/1234', body=expected)
self._assert_request_id(vol)
vol = cs.cgsnapshots.update('1234', name='cgs2')
cs.assert_called('PUT', '/cgsnapshots/1234', body=expected)
self._assert_request_id(vol)
vol = cs.cgsnapshots.update(v, name='cgs2')
cs.assert_called('PUT', '/cgsnapshots/1234', body=expected)
self._assert_request_id(vol)
def test_update_cgsnapshot_no_props(self):
cs.cgsnapshots.update('1234')
def test_list_cgsnapshot(self):
lst = cs.cgsnapshots.list()
cs.assert_called('GET', '/cgsnapshots/detail')
self._assert_request_id(lst)
def test_list_cgsnapshot_detailed_false(self):
lst = cs.cgsnapshots.list(detailed=False)
cs.assert_called('GET', '/cgsnapshots')
self._assert_request_id(lst)
def test_list_cgsnapshot_with_search_opts(self):
lst = cs.cgsnapshots.list(search_opts={'foo': 'bar'})
cs.assert_called('GET', '/cgsnapshots/detail?foo=bar')
self._assert_request_id(lst)
def test_list_cgsnapshot_with_empty_search_opt(self):
lst = cs.cgsnapshots.list(search_opts={'foo': 'bar', '123': None})
cs.assert_called('GET', '/cgsnapshots/detail?foo=bar')
self._assert_request_id(lst)
def test_get_cgsnapshot(self):
cgsnapshot_id = '1234'
vol = cs.cgsnapshots.get(cgsnapshot_id)
cs.assert_called('GET', '/cgsnapshots/%s' % cgsnapshot_id)
self._assert_request_id(vol)
| 38.515789
| 78
| 0.639519
| 447
| 3,659
| 5.024609
| 0.284116
| 0.081033
| 0.098397
| 0.109973
| 0.557881
| 0.44301
| 0.412289
| 0.387355
| 0.380232
| 0.295191
| 0
| 0.025171
| 0.239956
| 3,659
| 94
| 79
| 38.925532
| 0.782452
| 0.167532
| 0
| 0.359375
| 0
| 0
| 0.140594
| 0.017822
| 0
| 0
| 0
| 0
| 0.40625
| 1
| 0.15625
| false
| 0
| 0.03125
| 0
| 0.203125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3ee683dea81950011c4a8893e44e207f0c0558cc
| 275
|
py
|
Python
|
2016/Day5/tests.py
|
dh256/adventofcode
|
428eec13f4cbf153333a0e359bcff23070ef6d27
|
[
"MIT"
] | null | null | null |
2016/Day5/tests.py
|
dh256/adventofcode
|
428eec13f4cbf153333a0e359bcff23070ef6d27
|
[
"MIT"
] | null | null | null |
2016/Day5/tests.py
|
dh256/adventofcode
|
428eec13f4cbf153333a0e359bcff23070ef6d27
|
[
"MIT"
] | null | null | null |
import pytest
from Door import Door
def test_find_password():
door_id = "abc"
door = Door(door_id)
assert(door.find_password() == "18f47a30")
def test_find_password2():
door_id = "abc"
door = Door(door_id)
assert(door.find_password2() == "05ace8e3")
| 22.916667
| 47
| 0.676364
| 38
| 275
| 4.631579
| 0.368421
| 0.136364
| 0.125
| 0.147727
| 0.420455
| 0.420455
| 0.420455
| 0.420455
| 0.420455
| 0.420455
| 0
| 0.054054
| 0.192727
| 275
| 12
| 47
| 22.916667
| 0.738739
| 0
| 0
| 0.4
| 0
| 0
| 0.07971
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0.4
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
3eea8ad1e4ebfd2294a6137803d28554a3bc0130
| 5,621
|
py
|
Python
|
tools/perf/benchmarks/smoothness.py
|
justremotephone/android_external_chromium_org
|
246856e61da7acf5494076c74198f2aea894a721
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2020-01-25T10:18:18.000Z
|
2021-01-23T15:29:56.000Z
|
tools/perf/benchmarks/smoothness.py
|
justremotephone/android_external_chromium_org
|
246856e61da7acf5494076c74198f2aea894a721
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/perf/benchmarks/smoothness.py
|
justremotephone/android_external_chromium_org
|
246856e61da7acf5494076c74198f2aea894a721
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2020-11-04T07:24:13.000Z
|
2020-11-04T07:24:13.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from benchmarks import silk_flags
from measurements import smoothness
import page_sets
@test.Disabled # crbug.com/368767
class SmoothnessTop25(test.Test):
"""Measures rendering statistics while scrolling down the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = smoothness.Smoothness
page_set = page_sets.Top25PageSet
@test.Disabled('linux', 'mac', 'win') # crbug.com/368767
class SmoothnessToughCanvasCases(test.Test):
test = smoothness.Smoothness
page_set = page_sets.ToughCanvasCasesPageSet
@test.Disabled # crbug.com/373812
class SmoothnessToughWebGLCases(test.Test):
test = smoothness.Smoothness
page_set = page_sets.ToughWebglCasesPageSet
class SmoothnessMaps(test.Test):
test = smoothness.Smoothness
page_set = page_sets.MapsPageSet
class SmoothnessKeyMobileSites(test.Test):
"""Measures rendering statistics while scrolling down the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = smoothness.Smoothness
page_set = page_sets.KeyMobileSitesPageSet
@test.Disabled('android', 'mac') # crbug.com/350692, crbug.com/368767
class SmoothnessToughAnimationCases(test.Test):
test = smoothness.Smoothness
page_set = page_sets.ToughAnimationCasesPageSet
class SmoothnessKeySilkCases(test.Test):
"""Measures rendering statistics for the key silk cases without GPU
rasterization
"""
test = smoothness.Smoothness
page_set = page_sets.KeySilkCasesPageSet
class SmoothnessFastPathKeySilkCases(test.Test):
"""Measures rendering statistics for the key silk cases without GPU
rasterization using bleeding edge rendering fast paths.
"""
tag = 'fast_path'
test = smoothness.Smoothness
page_set = page_sets.KeySilkCasesPageSet
def CustomizeBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForFastPath(options)
@test.Disabled('android') # crbug.com/363783
class SmoothnessGpuRasterizationTop25(test.Test):
"""Measures rendering statistics for the top 25 with GPU rasterization
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.Top25PageSet
def CustomizeBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@test.Disabled('android') # crbug.com/363783
class SmoothnessGpuRasterizationKeyMobileSites(test.Test):
"""Measures rendering statistics for the key mobile sites with GPU
rasterization
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.KeyMobileSitesPageSet
def CustomizeBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
class SmoothnessGpuRasterizationKeySilkCases(test.Test):
"""Measures rendering statistics for the key silk cases with GPU rasterization
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.KeySilkCasesPageSet
def CustomizeBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
class SmoothnessFastPathGpuRasterizationKeySilkCases(
SmoothnessGpuRasterizationKeySilkCases):
"""Measures rendering statistics for the key silk cases with GPU rasterization
using bleeding edge rendering fast paths.
"""
tag = 'fast_path_gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.KeySilkCasesPageSet
def CustomizeBrowserOptions(self, options):
super(SmoothnessFastPathGpuRasterizationKeySilkCases, self). \
CustomizeBrowserOptions(options)
silk_flags.CustomizeBrowserOptionsForFastPath(options)
@test.Enabled('android')
class SmoothnessToughPinchZoomCases(test.Test):
"""Measures rendering statistics for pinch-zooming into the tough pinch zoom
cases
"""
test = smoothness.Smoothness
page_set = page_sets.ToughPinchZoomCasesPageSet
@test.Disabled # crbug.com/370725
class SmoothnessPolymer(test.Test):
"""Measures rendering statistics for Polymer cases.
"""
test = smoothness.Smoothness
page_set = page_sets.PolymerPageSet
@test.Disabled # crbug.com/370725
class SmoothnessFastPathPolymer(test.Test):
"""Measures rendering statistics for the Polymer cases without GPU
rasterization using bleeding edge rendering fast paths.
"""
tag = 'fast_path'
test = smoothness.Smoothness
page_set = page_sets.PolymerPageSet
def CustomizeBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForFastPath(options)
@test.Disabled # crbug.com/370725
class SmoothnessGpuRasterizationPolymer(test.Test):
"""Measures rendering statistics for the Polymer cases with GPU rasterization
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.PolymerPageSet
def CustomizeBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@test.Disabled # crbug.com/370725
class SmoothnessFastPathGpuRasterizationPolymer(
SmoothnessGpuRasterizationPolymer):
"""Measures rendering statistics for the Polymer cases with GPU rasterization
using bleeding edge rendering fast paths.
"""
tag = 'fast_path_gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.PolymerPageSet
def CustomizeBrowserOptions(self, options):
super(SmoothnessFastPathGpuRasterizationPolymer, self). \
CustomizeBrowserOptions(options)
silk_flags.CustomizeBrowserOptionsForFastPath(options)
| 32.871345
| 80
| 0.794698
| 587
| 5,621
| 7.51448
| 0.20954
| 0.034459
| 0.092496
| 0.107912
| 0.715484
| 0.713217
| 0.681025
| 0.625028
| 0.565178
| 0.449558
| 0
| 0.016762
| 0.129692
| 5,621
| 170
| 81
| 33.064706
| 0.884914
| 0.290162
| 0
| 0.65625
| 0
| 0
| 0.046992
| 0.013943
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.739583
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
3eeacd092135bae68f848fe55ada198c37b80c48 | 315 | py | Python | setup.py | igorccouto/robot | 67019ee1f52d5474ce36734c1e56725046471cd9 | ["MIT"] | null | null | null | setup.py | igorccouto/robot | 67019ee1f52d5474ce36734c1e56725046471cd9 | ["MIT"] | null | null | null | setup.py | igorccouto/robot | 67019ee1f52d5474ce36734c1e56725046471cd9 | ["MIT"] | null | null | null |
from setuptools import setup, find_packages

setup(name='robot',
      version='1.0',
      author='Igor Couto',
      author_email='[email protected]',
      description='A project to execute a robot that performs several actions on the browser.',
      packages=find_packages(),
      license='MIT'
      )
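For reference, find_packages() walks the directory that contains setup.py and returns every package (any folder with an __init__.py), which is what the packages= argument above ships. A quick way to preview the result from the project root is sketched below; the ['robot'] value in the comment is a hypothetical layout, since the repository tree is not part of this record.

from setuptools import find_packages

# Run from the directory that holds setup.py; prints the packages that
# packages=find_packages() will include, e.g. ['robot'] if the project keeps
# its code in a robot/ directory with an __init__.py (hypothetical layout).
print(find_packages())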
| 28.636364 | 96 | 0.660317 | 39 | 315 | 5.25641 | 0.820513 | 0.117073 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008264 | 0.231746 | 315 | 10 | 97 | 31.5 | 0.838843 | 0 | 0 | 0 | 0 | 0 | 0.37377 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3ef267c5d2fee953bb87ccf05e6b6f25d5d276e4 | 3,632 | py | Python | motorisedcameratracking/Cameras.py | wDove1/motorisedcameratracking | 97ae1722978fc99faf37c0ab7e3c8f39e3e2355d | ["Apache-2.0"] | null | null | null | motorisedcameratracking/Cameras.py | wDove1/motorisedcameratracking | 97ae1722978fc99faf37c0ab7e3c8f39e3e2355d | ["Apache-2.0"] | null | null | null | motorisedcameratracking/Cameras.py | wDove1/motorisedcameratracking | 97ae1722978fc99faf37c0ab7e3c8f39e3e2355d | ["Apache-2.0"] | null | null | null |
from picamera import PiCamera
from time import *
import cv2  # imported as a module so the cv2.* calls below resolve
import numpy as np
import os
from PIL import Image
from .errors import *


class RPICam:
    """A class for the raspberry pi camera

    Attributes:
        modelDetails: A dictionary containing details of the camera
        imagePath: The path where the image will be stored and found
        orientation: The orientation of the image
    """
    modelDetails = {'name': 'rPi_NoIRv2', 'width': 3280, 'height': 2464}
    imagePath: str = None
    orientation: float = None

    def __init__(self, imagePath: str, orientation: float):
        """Sets the values of imagePath and orientation

        Args:
            imagePath: the path to the image
            orientation: the orientation of the camera
        """
        self.imagePath = imagePath
        self.orientation = orientation

    def getImage(self, resolution: list = [1280, 720]):
        """Gets the image from the camera

        Args:
            resolution: The resolution the camera will capture at
        """
        camera = PiCamera()  # creates a camera object
        camera.resolution = (resolution[0], resolution[1])  # sets the resolution
        camera.capture(self.imagePath)  # captures the image to imagePath
        camera.close()  # closes the camera to prevent errors

    def postProcessing(self):
        """Loads the image and converts it to a numPy array

        Returns:
            img: A numPy array containing the image
        """
        img = cv2.imread(self.imagePath, 1)  # reads the image as a colour array
        os.remove(self.imagePath)  # removes the image from the location it was saved
        return img

    def orientationCorrection(self):
        """Corrects the orientation of the image

        Runs on the image before it is opened so may need replacing
        """
        img = Image.open(self.imagePath)  # opens the image
        img = img.rotate(-self.orientation)  # rotates the image
        img.save(self.imagePath)  # saves the image back to the same location

    def capture(self, resolution: list = [1280, 720]):
        """The outward facing method to capture an image

        Returns:
            numPy array: contains the image
        """
        self.getImage(resolution)  # gets the image
        self.orientationCorrection()  # corrects the orientation
        return self.postProcessing()  # returns the image as a numPy array

    def getData(self):
        return self.modelDetails  # details of the camera model in use

    def getModel(self):
        return self.modelDetails['name']  # the camera model name

    def getImagePath(self):
        return self.imagePath

    def getModelDetails(self):
        return self.modelDetails


class GenericCamera:
    """A generic camera using openCV"""
    camera = cv2.VideoCapture(1)

    def __init__(self):
        pass

    def capture(self):
        x, frame = self.camera.read()  # x is the success flag, frame the image
        return frame


class VirtualCamera:
    """A virtual camera primarily for testing as it has no practical purpose other than this"""
    images = []
    # images.append(np.array())

    def __init__(self):
        pass

    def capture(self):
        raise FeatureNotImplementedError()  # raise an error as this is not implemented
        # return None
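A short usage sketch for RPICam, assuming the package is importable as motorisedcameratracking and the code runs on a Raspberry Pi with the camera module attached; the temporary file path and the 90-degree orientation below are made-up example values.

# Hypothetical usage on a Raspberry Pi with the camera module attached and
# the package importable as motorisedcameratracking.
from motorisedcameratracking.Cameras import RPICam

cam = RPICam(imagePath='/tmp/frame.jpg', orientation=90.0)  # example path and rotation
frame = cam.capture(resolution=[1280, 720])  # numPy array holding the corrected image
print(frame.shape)  # e.g. (720, 1280, 3) for a colour capture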
| 35.262136 | 99 | 0.566079 | 387 | 3,632 | 5.27907 | 0.361757 | 0.062653 | 0.027411 | 0.0279 | 0.096427 | 0.02839 | 0.02839 | 0 | 0 | 0 | 0 | 0.013049 | 0.367015 | 3,632 | 102 | 100 | 35.607843 | 0.875598 | 0.348844 | 0 | 0.115385 | 0 | 0 | 0.012352 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0.038462 | 0.134615 | 0.076923 | 0.673077 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |