message | diff
---|---|
Speed up initial mib sync performance
Store the onu omci mib in memory only.
This speeds up onu activation considerably, especially on hardware-constrained
systems.
Future plans are to re-introduce mib persistence along with the
use of the 2.0 core and per adapter processes. | @@ -40,8 +40,8 @@ from voltha.extensions.omci.tasks.omci_sw_image_upgrade_task import OmciSwImageU
OpenOmciAgentDefaults = {
'mib-synchronizer': {
'state-machine': MibSynchronizer, # Implements the MIB synchronization state machine
- # 'database': MibDbVolatileDict, # Implements volatile ME MIB database
- 'database': MibDbExternal, # Implements persistent ME MIB database
+ 'database': MibDbVolatileDict, # Implements volatile ME MIB database
+ # 'database': MibDbExternal, # Implements persistent ME MIB database
'advertise-events': True, # Advertise events on OpenOMCI event bus
'tasks': {
'mib-upload': MibUploadTask,
|
Filter out None matches in match queries
Can happen if a race condition occurs when deleting matches | @@ -28,7 +28,7 @@ class EventMatchesQuery(DatabaseQuery):
event_key = self._query_args[0]
match_keys = yield Match.query(Match.event == ndb.Key(Event, event_key)).fetch_async(keys_only=True)
matches = yield ndb.get_multi_async(match_keys)
- raise ndb.Return(matches)
+ raise ndb.Return(filter(None, matches))
class TeamEventMatchesQuery(DatabaseQuery):
@@ -44,7 +44,7 @@ class TeamEventMatchesQuery(DatabaseQuery):
Match.team_key_names == team_key,
Match.event == ndb.Key(Event, event_key)).fetch_async(keys_only=True)
matches = yield ndb.get_multi_async(match_keys)
- raise ndb.Return(matches)
+ raise ndb.Return(filter(None, matches))
class TeamYearMatchesQuery(DatabaseQuery):
@@ -60,4 +60,4 @@ class TeamYearMatchesQuery(DatabaseQuery):
Match.team_key_names == team_key,
Match.year == year).fetch_async(keys_only=True)
matches = yield ndb.get_multi_async(match_keys)
- raise ndb.Return(matches)
+ raise ndb.Return(filter(None, matches))
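The filtering idiom in isolation (plain Python, sample data made up; note it drops any falsy value, not just None):

matches = [{'key': 'm1'}, None, {'key': 'm2'}]   # None left behind by a deleted match
cleaned = list(filter(None, matches))            # keeps only truthy entries
assert cleaned == [{'key': 'm1'}, {'key': 'm2'}]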
|
Adding additional logging of the root exception.
Helps to track down issues with underlying rendering. | @@ -17,6 +17,7 @@ class RenderedController(controllers.BaseController):
def __init__(self, view=None, doc=None, path=None, _pod=None):
self.view = view
self.path = path
+ self._pod = _pod
if doc:
self._pod_path = doc.pod_path
self._locale = str(doc.locale)
@@ -99,6 +100,8 @@ class RenderedController(controllers.BaseController):
exception.traceback = sys.exc_info()[2]
exception.controller = self
exception.exception = e
+ if self._pod:
+ self._pod.logger.error(text.format(self, e))
raise exception
def _inject_ui(self, content, preprocessor, translator):
|
Added instructions for emulating a terminal in PyCharm
Updated instructions for emulating the terminal in PyCharm Professional | @@ -156,8 +156,7 @@ asciimatics will not work. There are 2 workarounds.
1. The simplest is just to run asciimatics inside a real terminal or window - i.e. not inside
PyCharm/the IDE.
-2. If you must run inside PyCharm, the only option I've got working so far is the tests but even
- some of them need to skip where they cannot actually run. To run from the IDE, you must start a
+2. If you must run inside PyCharm, the Professional edition offers an option to emulate console output directly in PyCharm. To enable this functionality, see *Run | Edit Configurations | Configuration | Execution | Emulate terminal in output console*, otherwise you must start a
real console from the Terminal window e.g. using `start cmd /c "python <your file name>"`.
.. _unicode-issues-ref:
|
Update README.md
Crossed out completed milestones and added new module | @@ -38,7 +38,7 @@ If you're interested in writing your own modules for Pacu, check out our [Module
* 0.1 - Beta release (June 26th, 2018)
* 0.2 - 0.X - Beta releases
- * Proxy+Stager for routing Pacu activity through a compromised host.
+ * ~~Proxy+Stager for routing Pacu activity through a compromised host.~~
* Easy-install script
* Improved command syntax
* Filter available modules by category and required permissions/data
@@ -59,7 +59,7 @@ If you're interested in writing your own modules for Pacu, check out our [Module
* PinPoint SMS/Email/Mobile Push Abuse
* S3 Item Interception
* Download CloudWatch Logs
-* Enumerate Lambda Functions
+* ~~Enumerate Lambda Functions~~
* Create API Gateway Keys
* And many more.
@@ -118,6 +118,7 @@ Pacu's capabilities will increase dramatically as the number of modules grows, a
- `enum_ec2_termination_protection` - Collects a list of EC2 instances without termination protection.
- `enum_elb_logging` - Collects a list of Elastic Load Balancers without access logging.
- `enum_glue` - Enumerates Glue connections, crawlers, databases, development endpoints, and jobs.
+- `enum_lambda` - Pulls data related to Lambda Functions, source code, aliases, event source mappings, versions, tags, and policies.
- `enum_monitoring` - Detects monitoring and logging capabilities.
- `enum_users_roles_policies_groups` - Enumerates users, roles, customer-managed policies, and groups.
- `get_credential_report` - Generates and downloads an IAM credential report.
|
[Datasets] Improve `batch_format` error message
Improves the `batch_format` error message. | @@ -147,8 +147,8 @@ def _format_batch(batch: Block, batch_format: str) -> BatchType:
batch = BlockAccessor.for_block(batch).to_numpy()
else:
raise ValueError(
- f"The given batch format: {batch_format} "
- f"is invalid. Supported batch type: {BatchType}"
+ f"The given batch format '{batch_format}' is invalid. Supported "
+ f"`batch_format` values: {'default', 'pandas', 'pyarrow', 'numpy'}."
)
return batch
|
fix the building and installing doc of inference lib
The building and installing documentation should be updated because the code of the inference lib has changed. The current bug is that most of the files under the inference library path are missing. | * *CUDA Toolkit 8.0/9.0 with cuDNN v7.3+*
* *GPU's computing capability exceeds 1.0*
+Note: currently, the official Windows installation package only support CUDA 8.0/9.0 with single GPU, and don't support CUDA 9.1/9.2/10.0/10.1. if you need to use, please compile by yourself through the source code.
+
Please refer to the NVIDIA official documents for the installation process and the configuration methods of [CUDA](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/) and [cuDNN](https://docs.nvidia.com/deeplearning/sdk/cudnn-install/).
## Installation Method
|
Update documented default value for SendPushNotifications
* Update documented default value for SendPushNotifications
* Update config-settings.rst
Fixed formatting. | @@ -838,7 +838,7 @@ Enable Push Notifications
**False**: Mobile push notifications are disabled.
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| This feature's ``config.json`` setting is ``"SendPushNotifications": false`` with options ``true`` and ``false``. |
+| This feature's ``config.json`` setting is ``"SendPushNotifications": true`` with options ``true`` and ``false``. |
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------+
Push Notification Server
|
tests: Do not download Django from Git
This test takes a very long time, because it downloads Django from
GitHub. There should be no reason to do that, since we can use local
packages as well. | @@ -324,10 +324,13 @@ def test_upgrade_packages_option(tmpdir):
def test_generate_hashes_with_editable():
+ small_fake_package_dir = os.path.join(
+ os.path.split(__file__)[0], 'fixtures', 'small_fake_package')
+ small_fake_package_url = 'file:' + pathname2url(small_fake_package_dir)
runner = CliRunner()
with runner.isolated_filesystem():
with open('requirements.in', 'w') as fp:
- fp.write('-e git+https://github.com/django/[email protected]#egg=django\n')
+ fp.write('-e {}\n'.format(small_fake_package_url))
fp.write('pytz==2017.2\n')
out = runner.invoke(cli, ['--generate-hashes'])
expected = (
@@ -337,10 +340,10 @@ def test_generate_hashes_with_editable():
'#\n'
'# pip-compile --generate-hashes --output-file requirements.txt requirements.in\n'
'#\n'
- '-e git+https://github.com/django/[email protected]#egg=django\n'
+ '-e {}\n'
'pytz==2017.2 \\\n'
' --hash=sha256:d1d6729c85acea5423671382868627129432fba9a89ecbb248d8d1c7a9f01c67 \\\n'
' --hash=sha256:f5c056e8f62d45ba8215e5cb8f50dfccb198b4b9fbea8500674f3443e4689589\n'
- )
+ ).format(small_fake_package_url)
assert out.exit_code == 0
assert expected in out.output
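A sketch of the local-package URL construction the test relies on, assuming a Python 2/3 compatible import of pathname2url (paths are illustrative):

import os
try:
    from urllib.request import pathname2url   # Python 3
except ImportError:
    from urllib import pathname2url           # Python 2

fixture_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                           'fixtures', 'small_fake_package')
fixture_url = 'file:' + pathname2url(fixture_dir)
# e.g. 'file:/home/user/tests/fixtures/small_fake_package' -- pip can install
# this editable without any network access, unlike the old git+https URL.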
|
fix(device): add next/previous source for deCONZ and ZHA for E1810MediaPlayerController
related | @@ -105,6 +105,8 @@ class E1810MediaPlayerController(MediaPlayerController):
2003: MediaPlayer.RELEASE,
3001: MediaPlayer.HOLD_VOLUME_DOWN,
3003: MediaPlayer.RELEASE,
+ 4001: MediaPlayer.PREVIOUS_SOURCE,
+ 5001: MediaPlayer.NEXT_SOURCE,
}
def get_zha_actions_mapping(self) -> TypeActionsMapping:
@@ -119,6 +121,8 @@ class E1810MediaPlayerController(MediaPlayerController):
"stop": MediaPlayer.RELEASE,
"move_1_83": MediaPlayer.HOLD_VOLUME_DOWN,
"move_1_84": MediaPlayer.HOLD_VOLUME_DOWN, # ZigBee 3.0 firmware
+ "hold_3329_0": MediaPlayer.PREVIOUS_SOURCE,
+ "hold_3328_0": MediaPlayer.NEXT_SOURCE,
"release": MediaPlayer.RELEASE,
}
|
revert: server script errors
Added a change where all exceptions are caught when a server
script is executed, which makes validation errors useless. Reverting this
for now, until we find a better solution. | @@ -34,19 +34,7 @@ def run_server_script_for_doc_event(doc, event):
if scripts:
# run all scripts for this doctype + event
for script_name in scripts:
- try:
frappe.get_doc('Server Script', script_name).execute_doc(doc)
- except Exception as e:
- message = frappe._('Error executing Server Script {0}. Open Browser Console to see traceback.').format(
- frappe.utils.get_link_to_form('Server Script', script_name)
- )
- exception = type(e)
- if getattr(frappe, 'request', None):
- # all exceptions throw 500 which is internal server error
- # however server script error is a user error
- # so we should throw 417 which is expectation failed
- exception.http_status_code = 417
- frappe.throw(title=frappe._('Server Script Error'), msg=message, exc=exception)
def get_server_script_map():
# fetch cached server script methods
|
fix markdown for links in README
Some links were not displaying properly | @@ -280,7 +280,7 @@ to be atleast 224.
The images have to be loaded in to a range of [0, 1] and then
normalized using `mean=[0.485, 0.456, 0.406]` and `std=[0.229, 0.224, 0.225]`
-An example of such normalization can be found in `the imagenet example here` <https://github.com/pytorch/examples/blob/42e5b996718797e45c46a25c55b031e6768f8440/imagenet/main.py#L89-L101>
+An example of such normalization can be found in the imagenet example `here <https://github.com/pytorch/examples/blob/42e5b996718797e45c46a25c55b031e6768f8440/imagenet/main.py#L89-L101>`__
Transforms
==========
@@ -410,7 +410,7 @@ computing the ``(min, max)`` over all images.
``pad_value=<float>`` sets the value for the padded pixels.
-`Example usage is given in this notebook` <https://gist.github.com/anonymous/bf16430f7750c023141c562f3e9f2a91>
+Example usage is given in this `notebook <https://gist.github.com/anonymous/bf16430f7750c023141c562f3e9f2a91>`__
``save_image(tensor, filename, nrow=8, padding=2, normalize=False, range=None, scale_each=False, pad_value=0)``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
tests: UnconfiguredTree: fix path_restrict() tests due to missing SLOT
Creating pkg objs while iterating over a repo now checks for valid SLOTs;
otherwise the pkgs are added to the related masked repo. | @@ -100,11 +100,16 @@ class TestUnconfiguredTree(TempDirMixin):
ensure_dirs(pjoin(repo_dir, 'cat', 'bar'))
ensure_dirs(pjoin(repo_dir, 'tac', 'oof'))
touch(pjoin(repo_dir, 'skel.ebuild'))
- touch(pjoin(repo_dir, 'cat', 'foo', 'foo-1.ebuild'))
- touch(pjoin(repo_dir, 'cat', 'foo', 'foo-2.ebuild'))
touch(pjoin(repo_dir, 'cat', 'foo', 'Manifest'))
- touch(pjoin(repo_dir, 'cat', 'bar', 'bar-1.ebuild'))
- touch(pjoin(repo_dir, 'tac', 'oof', 'oof-1.ebuild'))
+ ebuilds = (
+ pjoin(repo_dir, 'cat', 'foo', 'foo-1.ebuild'),
+ pjoin(repo_dir, 'cat', 'foo', 'foo-2.ebuild'),
+ pjoin(repo_dir, 'cat', 'bar', 'bar-1.ebuild'),
+ pjoin(repo_dir, 'tac', 'oof', 'oof-1.ebuild'),
+ )
+ for ebuild in ebuilds:
+ with open(ebuild, 'w') as f:
+ f.write('SLOT=0\n')
# specify repo category dirs
with open(pjoin(repo_dir, 'profiles', 'categories'), 'w') as f:
|
Followup to commit
Use '~' as sed separator to avoid escaping filepaths | @@ -135,7 +135,7 @@ endif
GIT_TOOLKIT_VERSION_SHORT := $(shell git describe --abbrev=0)
# IF upload agent is built, dlls will be placed here, can be overriden with argument
# e.g. `make DLL_DEPS_FOLDER=C:\\toolkit-deps\\ pynsist_installer`
-DLL_DEPS_FOLDER = '..\/..\/src\/ua\/dist\/'
+DLL_DEPS_FOLDER = '../../src/ua/dist/'
pynsist_installer: toolkit_version $(DNANEXUS_HOME)/bin/dx dx-verify-file jq
@@ -144,7 +144,7 @@ pynsist_installer: toolkit_version $(DNANEXUS_HOME)/bin/dx dx-verify-file jq
sed s/TEMPLATE_STRING_TOOLKIT_VERSION/$${GIT_TOOLKIT_VERSION_SHORT}/ "$(DNANEXUS_HOME)"/build/pynsist_files/installer.cfg.template > "$(DNANEXUS_HOME)"/build/pynsist_files/installer.cfg
# Insert dll foldername
export DLL_DEPS_FOLDER=$(DLL_DEPS_FOLDER) ; \
- sed --in-place s/DLL_DEPS_FOLDER/$(DLL_DEPS_FOLDER)/ "$(DNANEXUS_HOME)"/build/pynsist_files/installer.cfg
+ sed --in-place "s~DLL_DEPS_FOLDER~$${DLL_DEPS_FOLDER}~" "$(DNANEXUS_HOME)"/build/pynsist_files/installer.cfg
# Copy wheel file into place without changing its filename
export DXPY_WHEEL_FILENAME=$$(basename $(DNANEXUS_HOME)/src/python/dist/dxpy-*.whl) ; \
|
[Docs] Add documentation for FeatureSet.plot
* [Doc] Add documentation for FeatureSet.plot
Add missing documentation for FeatureSet.plot
* missing one param
* solved comments | @@ -1073,11 +1073,11 @@ class FlowStep(BaseStep):
def plot(self, filename=None, format=None, source=None, targets=None, **kw):
"""plot/save graph using graphviz
- :param filename: target filepath for the image (None for the notebook)
- :param format: The output format used for rendering (``'pdf'``, ``'png'``, etc.)
- :param source: source step to add to the graph
- :param targets: list of target steps to add to the graph
- :param kw: kwargs passed to graphviz, e.g. rankdir="LR" (see: https://graphviz.org/doc/info/attrs.html)
+ :param filename: target filepath for the graph image (None for the notebook)
+ :param format: the output format used for rendering (``'pdf'``, ``'png'``, etc.)
+ :param source: source step to add to the graph image
+ :param targets: list of target steps to add to the graph image
+ :param kw: kwargs passed to graphviz, e.g. rankdir="LR" (see https://graphviz.org/doc/info/attrs.html)
:return: graphviz graph object
"""
return _generate_graphviz(
|
$.Debug.Sym_Matches: use symbol canonicalizer if available
TN: | ## vim: filetype=makoada
+<%namespace name="exts" file="extensions.mako" />
+
with Ada.Text_IO; use Ada.Text_IO;
with Ada.Unchecked_Conversion;
@@ -8,6 +10,11 @@ with Langkit_Support.Text; use Langkit_Support.Text;
with ${ada_lib_name}.Lexer; use ${ada_lib_name}.Lexer;
+${(exts.with_clauses(with_clauses + [
+ ((ctx.symbol_canonicalizer.unit_fqn, False)
+ if ctx.symbol_canonicalizer else None),
+]))}
+
package body ${ada_lib_name}.Debug is
--------
@@ -63,8 +70,15 @@ package body ${ada_lib_name}.Debug is
-----------------
function Sym_Matches (S : Symbol_Type; Text : String) return Boolean is
+ Symbol : constant Symbolization_Result :=
+ % if ctx.symbol_canonicalizer:
+ ${ctx.symbol_canonicalizer.fqn} (To_Text (Text))
+ % else:
+ Create_Symbol (To_Text (Text))
+ % endif
+ ;
begin
- return Image (S.all) = Text;
+ return Symbol.Success and then Image (S.all) = Image (Symbol.Symbol);
end Sym_Matches;
end ${ada_lib_name}.Debug;
|
Update magentocore.txt
Dup of Merging Reference section. | @@ -1035,6 +1035,7 @@ cloudservice.tw
silver-statistics.com
# Reference: https://twitter.com/felixaime/status/1219175480303202307
+# Reference: https://twitter.com/matr0cks/status/1220418827751763969
jqueryextplugin.com
@@ -1064,7 +1065,3 @@ sagepay-live.com
# Reference: https://www.bleepingcomputer.com/news/security/euro-cup-and-olympics-ticket-reseller-hit-by-magecart/
opendoorcdn.com
-
-# Reference: https://twitter.com/matr0cks/status/1220418827751763969
-
-jqueryextplugin.com
|
Typo
Test Plan: N/A
Reviewers: #ft, natekupp | @@ -89,7 +89,7 @@ def index_view(_path):
except seven.FileNotFoundError:
text = '''<p>Can't find webapp files. Probably webapp isn't built. If you are using
dagit, then probably it's a corrupted installation or a bug. However, if you are
- developing dagit locally, you problem can be fixed as follows:</p>
+ developing dagit locally, your problem can be fixed as follows:</p>
<pre>cd ./python_modules/
make rebuild_dagit</pre>'''
|
Add lsof, which is useful for cleaning up dead tmp files, to the
pipeline install | @@ -14,7 +14,7 @@ Quick install, for Debian and Debian-esque systems:
sudo apt-get install build-essential python3-dev python3-pip \
libxml2-dev libxslt-dev zlib1g-dev libssl-dev libsqlite3-dev \
libffi-dev git tmux fontconfig-config fonts-dejavu-core \
- libfontconfig1 libjpeg-turbo8 libjpeg8
+ libfontconfig1 libjpeg-turbo8 libjpeg8 lsof
pip3 install --upgrade pip
Download phantomjs from http://phantomjs.org/download.html and place
|
Update book reference in ETS example
Update link to Ch 8 "Exponential smoothing" in Hyndman & Athanasopoulos forecasting book (3rd ed)
to fix a 404 error | "\n",
"However, not all of these methods are stable. Refer to [1] and references therein for more info about model stability.\n",
"\n",
- "[1] Hyndman, Rob J., and George Athanasopoulos. *Forecasting: principles and practice*, 3rd edition, OTexts, 2019. https://www.otexts.org/fpp3/7"
+ "[1] Hyndman, Rob J., and Athanasopoulos, George. *Forecasting: principles and practice*, 3rd edition, OTexts, 2021. https://otexts.com/fpp3/expsmooth.html"
]
},
{
|
remove debug prints
whoops | @@ -1120,12 +1120,9 @@ class H265Codec(VideoCodec):
if 'params' in safe:
params = safe['params']
if 'framedata' in safe:
- print("framedata is in safe")
- print(safe['framedata'])
if params:
params = params + ":"
params = params + self.safe_framedata(safe['framedata'])
- print(params)
if params:
optlist.extend(['-%s' % self.codec_params, params])
if 'tune' in safe:
|
Update README.md
minor pos clarification | # QRL
Quantum Resistant Ledger
-Python-based blockchain ledger utilising hash-based one-time merkle tree signature scheme (XMSS) instead of ECDSA. Proof-of-stake block selection via HMAC_DRBG PRF and a signed iterative hash chain reveal scheme.
+Python-based blockchain ledger utilising hash-based one-time merkle tree signature scheme (XMSS) instead of ECDSA. Proof-of-stake block selection via a signed iterative hash chain reveal scheme which is both probabilistic and random (https://github.com/theQRL/pos).
Hash-based signatures means larger transactions (6kb per tx, binary), longer keypair generation times and the need to record 'state' of transactions as each keypair can only be used once safely. Merkle tree usage enables a single address to be used for signing numerous transactions (up to 2^13 computationally easily enough). Transactions have an incremented nonce to allow wallets to know which MSS keypair to use - currently Lamport-Diffie and Winternitz one-time signatures as part of merkle signature schemes and XMSS/W-OTS+ are natively supported.
@@ -11,3 +11,4 @@ Hash-based signatures means larger transactions (6kb per tx, binary), longer key
+
|
Upgrade to pytest v4.6.5
Our CI build failures are related to pytest-dev/pytest#5903. We initially restricted pytest up to v4.1 because pytest_twisted wasn't behaving well with the latest version of pytest at the time. | @@ -27,7 +27,7 @@ extras_require = {
# Twisted 19.7.0 dropped py3.4 support
"twisted:python_version == '3.4'": "twisted<=19.2.1",
"typing": ["typing>=3.6.4"],
- "tests": ["pytest<4.1", "pytest-mock", "pytest-cov", "pytest-twisted"],
+ "tests": ["pytest==4.6.5", "pytest-mock", "pytest-cov", "pytest-twisted"],
}
metadata = {
|
puppeteer_test: Change browser default viewport to allow Firefox.
When we run puppeteer with Firefox,
the `--window-size` option does not work,
which makes the bottom part of
the page cut off.
This commit fixes this issue
by setting the screen default viewport
to the maximum size of the window. | @@ -65,7 +65,10 @@ class CommonUtils {
"--no-sandbox",
"--disable-setuid-sandbox",
],
- defaultViewport: {width: 1280, height: 1024},
+ // TODO: Change defaultViewport to 1280x1024 when puppeteer fixes the window size issue with firefox.
+ // Here is link to the issue that is tracking the above problem https://github.com/puppeteer/puppeteer/issues/6442.
+ // @ts-expect-error: Because of https://github.com/puppeteer/puppeteer/issues/6885
+ defaultViewport: null,
headless: true,
});
}
|
Clear cache between test runs
This avoids any potential issue of caches persisting between runs | @@ -15,6 +15,12 @@ from wagtail.test.testapp.models import SimplePage
class TestPageUrlTags(TestCase):
fixtures = ["test.json"]
+ def setUp(self):
+ super().setUp()
+
+ # Clear caches
+ cache.clear()
+
def test_pageurl_tag(self):
response = self.client.get("/events/")
self.assertEqual(response.status_code, 200)
|
Add Rejax
Squashes:
* Add Rejax API to Development
* added `ticks` | @@ -413,6 +413,7 @@ API | Description | Auth | HTTPS | CORS |
| [QR code](http://qrtag.net/api/) | Create an easy to read QR code and URL shortener | No | Yes | Yes |
| [QR code](http://goqr.me/api/) | Generate and decode / read QR code graphics | No | Yes | Unknown |
| [QuickChart](https://quickchart.io/) | Generate chart and graph images | No | Yes | Yes |
+| [Rejax](https://rejax.io/) | Reverse AJAX service to notify clients | `apiKey` | Yes | No |
| [ReqRes](https://reqres.in/ ) | A hosted REST-API ready to respond to your AJAX requests | No | Yes | Unknown |
| [RSS feed to JSON](https://rss-to-json-serverless-api.vercel.app) | Returns RSS feed in JSON format using feed URL | No | Yes | Yes |
| [Scraper.AI](https://docs.scraper.ai/#/) | Extract and monitor data from any website | `apiKey` | Yes | Unknown |
|
Update Kentucky.md
changed the date | @@ -34,7 +34,7 @@ A young woman was injured by a rubber bullet she took to the head.
* [Video](https://twitter.com/shannynsharyse/status/1267015577266249728)
* [Photo of Victim](https://twitter.com/shannynsharyse/status/1266631722239766528)
-### Police shoot at cars in traffic from overpass | June 1st?
+### Police shoot at cars in traffic from overpass | May 31st
Several police officers shoot from an overpass at cars stuck in traffic. Bystander reports at least one car window broken.
|
Re-add in pylint plugin
The `undefined-variable` pylint plugin can now be re-added in, as it was removed until [an underlying issue](https://github.com/PyCQA/pylint/issues/3791) had been fixed. | @@ -45,8 +45,7 @@ enable=
syntax-error,
too-many-function-args,
trailing-whitespace,
- # Disabling until https://github.com/PyCQA/pylint/issues/3791 is fixed
- # undefined-variable,
+ undefined-variable,
unexpected-keyword-arg,
unhashable-dict-key,
unnecessary-pass,
|
update list of releasers within MAINTAINERS.md
This adds David and Hamzah who are engineers at Ambassador Labs.
They will be contributing to the project and assisting with releases.
Once they have met the governance guidelines they will be nominated
to become maintainers. | @@ -8,7 +8,7 @@ describes governance guidelines and maintainer responsibilities.
Maintainers are listed in alphabetical order.
| Maintainer | GitHub ID | Affiliation |
-| ---------- | --------- | ----------- |
+| ---------------- | --------------------------------------------- | --------------------------------------------------- |
| Aidan Hahn | [aidanhahn](https://github.com/aidanhahn) | unaffiliated |
| Alex Gervais | [alexgervais](https://github.com/alexgervais) | [Ambassador Labs](https://www.github.com/datawire/) |
| Alice Wasko | [aliceproxy](https://github.com/aliceproxy) | [Ambassador Labs](https://www.github.com/datawire/) |
@@ -20,7 +20,9 @@ In addition to the maintainers, Emissary releases may be created by any
of the following (also listed in alphabetical order):
| Releaser | GitHub ID | Affiliation |
-| -------- | --------- | ----------- |
+| ------------ | ----------------------------------- | --------------------------------------------------- |
+| David Dymko | [ddymko](https://github.com/ddymko) | [Ambassador Labs](https://www.github.com/datawire/) |
+| Hamzah Qudsi | [haq204](https://github.com/haq204) | [Ambassador Labs](https://www.github.com/datawire/) |
| Will Hardin | [w-h37](https://github.com/w-h37) | [Ambassador Labs](https://www.github.com/datawire/) |
## Maintainers Emeriti
@@ -31,4 +33,3 @@ of the following (also listed in alphabetical order):
## Releasers Emeriti
* Noah Krause ([iNoahNothing](https://github.com/iNoahNothing))
-
|
swarming: clear cache in main_test.py
This is to fix flaky test failure in | @@ -21,8 +21,9 @@ from depot_tools import auto_stub
from depot_tools import fix_encoding
# client/
-from utils import subprocess42
from utils import file_path
+from utils import subprocess42
+from utils import tools
import swarmingserver_bot_fake
from bot_code import bot_main
@@ -31,6 +32,7 @@ from bot_code import bot_main
class TestCase(auto_stub.TestCase):
def setUp(self):
super(TestCase, self).setUp()
+ tools.clear_cache_all()
self._tmpdir = tempfile.mkdtemp(prefix='swarming_main')
self._zip_file = os.path.join(self._tmpdir, 'swarming_bot.zip')
code, _ = swarmingserver_bot_fake.gen_zip(self.url)
|
stream_edit: Replace -1 with settings_config.retain_message_forever.
This commit replaces -1 with settings_config.retain_message_forever
for the message retention days setting. | @@ -58,13 +58,14 @@ exports.get_retention_policy_text_for_subscription_type = function (sub) {
let message_retention_days = sub.message_retention_days;
// If both this stream and the organization-level policy are to retain forever,
// there's no need to comment on retention policies when describing the stream.
- if (page_params.realm_message_retention_days === -1 &&
- (sub.message_retention_days === null || sub.message_retention_days === -1)) {
+ if (page_params.realm_message_retention_days === settings_config.retain_message_forever
+ && (sub.message_retention_days === null ||
+ sub.message_retention_days === settings_config.retain_message_forever)) {
return;
}
// Forever for this stream, overriding the organization default
- if (sub.message_retention_days === -1) {
+ if (sub.message_retention_days === settings_config.retain_message_forever) {
return i18n.t("Messages in this stream will be retained forever.");
}
@@ -79,7 +80,7 @@ exports.get_retention_policy_text_for_subscription_type = function (sub) {
exports.get_display_text_for_realm_message_retention_setting = function () {
const realm_message_retention_days = page_params.realm_message_retention_days;
- if (realm_message_retention_days === -1) {
+ if (realm_message_retention_days === settings_config.retain_message_forever) {
return i18n.t("(forever)");
}
return i18n.t("(__message_retention_days__ days)", {message_retention_days: realm_message_retention_days});
@@ -97,7 +98,7 @@ function set_stream_message_retention_setting_dropdown(stream) {
let value = "retain_for_period";
if (stream.message_retention_days === null) {
value = "realm_default";
- } else if (stream.message_retention_days === -1) {
+ } else if (stream.message_retention_days === settings_config.retain_message_forever) {
value = "forever";
}
|
Support downloading the gocaves binary on ARM
Tested-by: Build Bot | @@ -18,6 +18,7 @@ from __future__ import annotations
import json
import os
import pathlib
+import platform
import select
import socket
import sys
@@ -266,7 +267,7 @@ class CavesMockServer(MockServer):
self._caves_version = caves_version
if self._caves_version is None:
- self._caves_version = 'v0.0.1-69'
+ self._caves_version = 'v0.0.1-74'
self._build_caves_url(caves_url)
self._validate_caves_path(caves_path)
@@ -286,6 +287,9 @@ class CavesMockServer(MockServer):
def _build_caves_url(self, url):
if sys.platform.startswith('linux'):
+ if platform.machine() == 'aarch64':
+ self._caves_url = f"{url}/{self._caves_version}/gocaves-linux-arm64"
+ else:
self._caves_url = f"{url}/{self._caves_version}/gocaves-linux-amd64"
elif sys.platform.startswith('darwin'):
self._caves_url = f"{url}/{self._caves_version}/gocaves-macos"
@@ -297,7 +301,7 @@ class CavesMockServer(MockServer):
def _validate_caves_path(self, caves_path=None):
if not (caves_path and not caves_path.isspace()):
if sys.platform.startswith('linux'):
- caves_path = 'gocaves-linux-amd64'
+ caves_path = 'gocaves-linux-arm64' if platform.machine() == 'aarch64' else 'gocaves-linux-amd64'
elif sys.platform.startswith('darwin'):
caves_path = 'gocaves-macos'
elif sys.platform.startswith('win32'):
|
Update gcp_managed_relational_db.py
Fix _ParseEndpoint (should have been in last PR) | @@ -245,7 +245,7 @@ class GCPManagedRelationalDb(managed_relational_db.BaseManagedRelationalDb):
resource URI (string)
"""
try:
- selflink = describe_instance_json[0]['selfLink']
+ selflink = describe_instance_json['selfLink']
except:
selflink = ''
logging.exception('Error attempting to read stdout. Creation failure.')
|
Update Node Exporter to 1.1.0
Release notes: | @@ -29,9 +29,8 @@ packages:
<<: *default_context
static:
<<: *default_static_context
- version: 1.0.1
+ version: 1.1.0
license: ASL 2.0
- release: 2
URL: https://github.com/prometheus/node_exporter
summary: Prometheus exporter for machine metrics, written in Go with pluggable metric collectors.
description: |
|
Release 4.5.1
adapt release notes | @@ -3,6 +3,9 @@ The released versions correspond to PyPi releases.
## Version 4.6.0 (as yet unreleased)
+## [Version 4.5.1](https://pypi.python.org/pypi/pyfakefs/4.5.1) (2021-08-29)
+This is a bugfix release.
+
### Fixes
* added handling of path-like where missing
* improved handling of `str`/`bytes` paths
|
GDB helpers: fix pretty-printers after recent internal structs renamings
TN: | @@ -48,12 +48,14 @@ class Context(object):
corresponding entity records.
"""
return {
- '{}__implementation__entity_{}'.format(
+ '{}__implementation__internal_entity_{}'.format(
self.lib_name,
Name.from_camel_with_underscores(name).lower
) for name in self.astnode_names
- } | {'{}__implementation__ast_envs__entity'
- .format(self.lib_name)}
+ } | {
+ '{}__implementation__internal_entity'.format(self.lib_name),
+ '{}__implementation__ast_envs__entity'.format(self.lib_name),
+ }
def decode_state(self, frame=None):
"""
|
[swarming] Enable streaming BotEvent, TaskRequest and TaskResult to BigQuery
Update cron.yaml to enable the cron jobs that stream the three tables:
swarming.bot_events, swarming.task_requests and swarming.task_results. | @@ -30,6 +30,16 @@ cron:
schedule: every 5 minutes synchronized
target: backend
+- description: Send task requests to BigQuery
+ target: backend
+ url: /internal/cron/tasks/send_requests_to_bq
+ schedule: every 1 minutes
+
+- description: Send task results to BigQuery
+ target: backend
+ url: /internal/cron/tasks/send_results_to_bq
+ schedule: every 1 minutes
+
### Bots
@@ -48,6 +58,11 @@ cron:
schedule: every 5 minutes synchronized
target: backend
+- description: Send bot events to BigQuery
+ target: backend
+ url: /internal/cron/bots/send_to_bq
+ schedule: every 1 minutes
+
### Named caches
|
Reports splitter adjustments.
Also replace ';' separator with '|' | @@ -1191,7 +1191,6 @@ class DialogReportCodes(QtWidgets.QDialog):
pass
coder = self.ui.comboBox_coders.currentText()
- #self.html_results = ""
self.html_links = [] # For html file output with media
search_text = self.ui.lineEdit.text()
@@ -1658,12 +1657,11 @@ class DialogReportCodes(QtWidgets.QDialog):
self.eventFilterTT.setTextResults(self.text_results)
self.ui.textEdit.cursorPositionChanged.connect(self.show_context_of_clicked_heading)
- # Need to resize splitter as it automatically adjusts to 50%/50%
- self.ui.splitter.setSizes([100, 300])
-
- # Fill case matrix
+ # Fill case matrix or clear third splitter pane.
if self.case_ids != "":
self.fill_matrix(self.text_results, self.image_results, self.av_results, self.case_ids)
+ else:
+ self.ui.splitter.replaceWidget(2, QtWidgets.QTableWidget())
def put_image_into_textedit(self, img, counter, text_edit):
""" Scale image, add resource to document, insert image.
@@ -1868,7 +1866,7 @@ class DialogReportCodes(QtWidgets.QDialog):
for att in self.attribute_selection:
label += att[0] + " " + att[3] + " "
label += ','.join(att[4])
- label += "; "
+ label += "| "
self.ui.label_selections.setText(label)
def select_files(self):
@@ -1880,7 +1878,6 @@ class DialogReportCodes(QtWidgets.QDialog):
the user must press select files button then cancel the dialog.
"""
- self.ui.splitter.setSizes([300, 300, 0])
self.ui.pushButton_fileselect.setToolTip("")
self.ui.pushButton_caseselect.setToolTip("")
self.case_ids = ""
@@ -1900,7 +1897,7 @@ class DialogReportCodes(QtWidgets.QDialog):
files_text = ""
for row in selected_files:
tmp_ids += "," + str(row['id'])
- files_text += "; " + row['name']
+ files_text += "| " + row['name']
files_text = files_text[2:]
tooltip += files_text
if len(tmp_ids) > 0:
@@ -1917,7 +1914,6 @@ class DialogReportCodes(QtWidgets.QDialog):
If neither are selected the default is all files are selected.
"""
- self.ui.splitter.setSizes([300, 300, 0])
self.ui.pushButton_fileselect.setToolTip("")
self.ui.pushButton_caseselect.setToolTip("")
self.file_ids = ""
@@ -1937,7 +1933,7 @@ class DialogReportCodes(QtWidgets.QDialog):
cases_text = ""
for row in selected_cases:
tmp_ids += "," + str(row['id'])
- cases_text += "; " + row['name']
+ cases_text += "| " + row['name']
cases_text = cases_text[2:]
tooltip += cases_text
if len(tmp_ids) > 0:
|
add test cases for Noneable(Permissive())
Summary: didn't find any problems, but additional test coverage can't hurt
Test Plan: all tests
Reviewers: sashank, prha, schrockn | @@ -155,3 +155,12 @@ def test_post_process_config():
'bar': 'baz',
'mau': 'mau',
}
+
+ noneable_permissive_config_type = resolve_to_config_type(
+ {'args': Field(Noneable(Permissive()), is_required=False, default_value=None)}
+ )
+ assert post_process_config(
+ noneable_permissive_config_type, {'args': {'foo': 'wow', 'mau': 'mau'}}
+ ).value['args'] == {'foo': 'wow', 'mau': 'mau',}
+ assert post_process_config(noneable_permissive_config_type, {'args': {}}).value['args'] == {}
+ assert post_process_config(noneable_permissive_config_type, None).value['args'] == None
|
DOC: signal: Refer to fs instead of nyq in the firwin docstring.
The nyq argument is deprecated, so the descriptions of the other
arguments should refer to fs instead of nyq. | @@ -279,14 +279,14 @@ def firwin(numtaps, cutoff, width=None, window='hamming', pass_zero=True,
order + 1). `numtaps` must be odd if a passband includes the
Nyquist frequency.
cutoff : float or 1D array_like
- Cutoff frequency of filter (expressed in the same units as `nyq`)
+ Cutoff frequency of filter (expressed in the same units as `fs`)
OR an array of cutoff frequencies (that is, band edges). In the
latter case, the frequencies in `cutoff` should be positive and
- monotonically increasing between 0 and `nyq`. The values 0 and
- `nyq` must not be included in `cutoff`.
+ monotonically increasing between 0 and `fs/2`. The values 0 and
+ `fs/2` must not be included in `cutoff`.
width : float or None, optional
If `width` is not None, then assume it is the approximate width
- of the transition region (expressed in the same units as `nyq`)
+ of the transition region (expressed in the same units as `fs`)
for use in Kaiser FIR filter design. In this case, the `window`
argument is ignored.
window : string or tuple of string and parameter values, optional
@@ -302,8 +302,8 @@ def firwin(numtaps, cutoff, width=None, window='hamming', pass_zero=True,
- 0 (DC) if the first passband starts at 0 (i.e. pass_zero
is True)
- - `nyq` (the Nyquist frequency) if the first passband ends at
- `nyq` (i.e the filter is a single band highpass filter);
+ - `fs/2` (the Nyquist frequency) if the first passband ends at
+ `fs/2` (i.e the filter is a single band highpass filter);
center of first passband otherwise
nyq : float, optional
|
Fixed error in documentation
The renamed_file function contains the following which ends up on readthedocs:
:note: This property is deprecated, please use ``renamed_file`` instead.
Removed the line | @@ -384,7 +384,6 @@ class Diff(object):
@property
def renamed_file(self):
""":returns: True if the blob of our diff has been renamed
- :note: This property is deprecated, please use ``renamed_file`` instead.
"""
return self.rename_from != self.rename_to
|
Fix network-isolation.j2.yaml to ignore VIPs for disabled networks
This change modifies network-isolation.j2.yaml to ignore VIPs for
networks that are disabled. This fixes a bug where VIPs would be
created in network-isolation.yaml even if a network was disabled. | @@ -17,7 +17,7 @@ resource_registry:
{%- endfor %}
# Port assignments for the VIPs
- {%- for network in networks if network.vip %}
+ {%- for network in networks if network.vip and network.enabled|default(true) %}
OS::TripleO::Network::Ports::{{network.name}}VipPort: ../network/ports/{{network.name_lower|default(network.name.lower())}}.yaml
{%- endfor %}
OS::TripleO::Network::Ports::RedisVipPort: ../network/ports/vip.yaml
|
ENH: print a warning when assuming 0 for the position
closes | @@ -948,6 +948,8 @@ def __read_and_stash_a_motor(obj, initial_positions, coupled_parents):
reading = yield Msg('read', obj)
if reading is None:
# this plan may be being list-ified
+ print("*** all positions for {m.name} are "
+ "relative to current position ***".format(m=obj))
cur_pos = 0
else:
fields = getattr(obj, 'hints', {}).get('fields', [])
|
Working Oven Driver
I noticed that a wait time is needed after writing anything to the oven (run/stop command, setpoint command). The required wait time appeared to be around 800+ ms, so I suggested waiting for 1000 ms in the docstrings. Queries do not seem to require this wait time. | @@ -26,6 +26,7 @@ from pymeasure.instruments.validators import strict_discrete_set, strict_range
class Thermotron3800(Instrument):
""" Represents the Thermotron 3800 Oven.
+ For now, this driver only supports using Control Channel 1.
"""
def __init__(self, resourceName, **kwargs):
@@ -36,24 +37,93 @@ class Thermotron3800(Instrument):
)
id = Instrument.measurement(
- "IDEN?", """ Reads the instrument identification """
+ "IDEN?", """ Reads the instrument identification
+
+ :return: String
+ """
)
temperature = Instrument.measurement(
"PVAR1?", """ Reads the current temperature of the oven
via built in thermocouple
+
+ :return: float
"""
)
+ mode = Instrument.measurement(
+ "MODE??", """ Gets the operating mode of the oven.
+
+ :return: Tuple(String, int)
+ """,
+ get_process=lambda mode: Thermotron3800.__translate_mode(mode)
+ )
+
setpoint = Instrument.control(
"SETP1?", "SETP1,%g",
- """ A loating point property that controls the setpoint
+ """ A floating point property that controls the setpoint
of the oven in Celsius. This property can be set.
+
+ Insert wait time after this command. This wait time should be >1000ms for consistent results.
+ Failing to wait for an adequate time may cause errors in subsequent oven commands.
+
+ :return: None
""",
validator=strict_range,
values=[-55, 150]
)
-if __name__ == "__main__":
- thermotron = Thermotron3800("GPIB::1::INSTR")
- print(thermotron.id)
+ def run(self):
+ '''
+ Insert wait time after this command. This wait time should be >1000ms for consistent results.
+ Failing to wait for an adequate time may cause errors in subsequent oven commands.
+ :return: None
+ '''
+ self.write("RUNM")
+
+ def stop(self):
+ '''
+ Insert wait time after this command. This wait time should be >1000ms for consistent results.
+ Failing to wait for an adequate time may cause errors in subsequent oven commands.
+ :return: None
+ '''
+ self.write("STOP")
+
+ def initalize_oven(self):
+ '''
+ Please wait 3 seconds after calling initialize_oven before running
+ any other oven commands (per manufacterer's instructions).
+
+ :return: None
+ '''
+ self.write("INIT")
+
+ @staticmethod
+ def __translate_mode(mode_coded_integer):
+ '''
+ Bit 0 = Program mode
+ Bit 1 = Edit mode (controller in stop mode)
+ Bit 2 = View program mode
+ Bit 3 = Edit mode (controller in hold mode)
+ Bit 4 = Manual mode
+ Bit 5 = Delayed start mode
+ Bit 6 = Unused
+ Bit 7 = Calibration mode
+ '''
+ map = {
+ 1: "Program mode",
+ 2: "Edit mode (controller in stop mode)",
+ 4: "View program mode",
+ 8: "Edit mode (controller in hold mode)",
+ 16: "Manual mode",
+ 32: "Delayed start mode",
+ 64: "Unused (Error)",
+ 128: "Calibration mode"
+ }
+
+ mode_coded_integer_int = int(mode_coded_integer)
+
+ if mode_coded_integer in map:
+ return tuple( (map[mode_coded_integer_int], mode_coded_integer_int) )
+ else:
+ return tuple( ("Unknown or combined mode.", mode_coded_integer_int) )
|
Update howto.rst
silly footnote | @@ -167,7 +167,7 @@ The output still warns us about something:
WARNING:phys2bids.physio_obj:Found 158 timepoints less than expected!
WARNING:phys2bids.physio_obj:Correcting time offset, assuming missing timepoints are at the beginning (try again with a more liberal thr)
-How come?!? We know there are exactly 158 timepoints![#]_ In order to find the triggers, ``phys2bids`` gets the first derivative of the trigger channel, and uses a threshold (default 2.5) to get the peaks of the derivative, corresponding to the trigger event. If the threshold is too strict or is too liberal for the recorded trigger, it won't get all the trigger points.
+How come?!? We know there are exactly 158 timepoints![1]_ In order to find the triggers, ``phys2bids`` gets the first derivative of the trigger channel, and uses a threshold (default 2.5) to get the peaks of the derivative, corresponding to the trigger event. If the threshold is too strict or is too liberal for the recorded trigger, it won't get all the trigger points.
| ``phys2bids`` was created to deal with little sampling errors - such as distracted researchers that started sampling a bit too late than expected. For this reason, if it finds less timepoints than the amount specified, it will assume that the error was caused by a *distracted researcher*.
Therefore, we need to change the ``-thr`` input until ``phys2bids`` finds the correct number of timepoints. Looking at the tutorial_file_trigger_time.png file can help determine what threshold is more appropriate. For this tutorial file, a threshold of 0.735 finds the right number of time points.
|
Allow ports in k8s service urls for s3 mock
If there is a port in the host for the request, then this if statement
is not tripped. | @@ -168,7 +168,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
or host.startswith("localhost")
or host.startswith("localstack")
or re.match(r"^[^.]+$", host)
- or re.match(r"^.*\.svc\.cluster\.local$", host)
+ or re.match(r"^.*\.svc\.cluster\.local:?\d*$", host)
):
# Default to path-based buckets for (1) localhost, (2) localstack hosts (e.g. localstack.dev),
# (3) local host names that do not contain a "." (e.g., Docker container host names), or
|
framework/getters: Ignore connection errors with HTTP getter
If the http getter is used but a connection is not available, an error
will be raised when attempting to fetch the resource index. Now we
ignore the error, allowing the remaining getters to be used. | @@ -203,7 +203,7 @@ class Http(ResourceGetter):
def __init__(self, **kwargs):
super(Http, self).__init__(**kwargs)
self.logger = logger
- self.index = None
+ self.index = {}
def register(self, resolver):
resolver.register(self.get, SourcePriority.remote)
@@ -212,7 +212,12 @@ class Http(ResourceGetter):
if not resource.owner:
return # TODO: add support for unowned resources
if not self.index:
+ try:
self.index = self.fetch_index()
+ except requests.exceptions.RequestException as e:
+ msg = 'Skipping HTTP getter due to connection error: {}'
+ self.logger.debug(msg.format(e.message))
+ return
if resource.kind == 'apk':
# APKs must always be downloaded to run ApkInfo for version
# information.
|
STY: matrix to jobs
Switched from matrix to jobs. | language: python
dist: xenial
-matrix:
+jobs:
include:
- name: 'docs'
python: '3.6'
@@ -14,6 +14,9 @@ matrix:
script: pytest --cov=pysat/
- python: '3.8'
script: pytest --cov=pysat/
+ allow_failures:
+ - name: 'docs'
+ python: '3.6'
services: xvfb
cache: pip
@@ -49,7 +52,3 @@ install:
after_success:
- coveralls --rcfile=setup.cfg
-
-allow_failures:
- - name: 'docs'
- python: '3.6'
|
fix a bug of calling averager.get()
Before any update is performed, calling get() originally returns NaN. It should return 0 instead, regardless of whether self._mass == 0. | @@ -180,8 +180,10 @@ class EMAverager(tf.Module):
Tensor: the current average
"""
return tf.nest.map_structure(
- lambda average: (average / tf.cast(
- self._mass, dtype=average.dtype)), self._average)
+ lambda average: (average / tf.maximum(
+ tf.cast(self._mass, dtype=average.dtype),
+ tf.cast(self._update_rate, dtype=average.dtype))),
+ self._average)
def average(self, tensor):
"""Combines self.update and self.get in one step. Can be handy in practice.
|
Make human readable date representation consistent
Django humanize and moment libraries use slightly different
logic for representing dates. This commit is to make
representations as similar as possible. | mod.factory('fromQueryParams', [_fromQueryParams]);
mod.factory('getCSRFToken', [_getToken]);
+ // Configure moment.js tresholds
+ moment.relativeTimeRounding(Math.floor);
+ moment.relativeTimeThreshold('s', 60);
+ // NOTE(cutwater): Setting 'ss' treshhold before 's' overrides
+ // it's value to 's' - 1
+ moment.relativeTimeThreshold('ss', 0);
+ moment.relativeTimeThreshold('m', 60);
+ moment.relativeTimeThreshold('h', 24);
+ moment.relativeTimeThreshold('d', 31);
+ moment.relativeTimeThreshold('M', 12);
+
mod.filter('timeFromNow', function () {
return function (value) {
return moment(value).fromNow();
|
Display correct error message for missing imports
Resolves
When a missing import is found in a script, this changes the error
message displayed to indicate that there's a missing import, rather than
giving a generic error message similar to the one raised when the script
does not exist | @@ -107,7 +107,7 @@ class Command(EmailNotificationCommand):
finally:
exc_traceback = None
- if verbosity > 1:
+ if verbosity > 0 and not silent:
if verbosity > 2:
traceback.print_exc()
print(ERROR("Cannot import module '%s': %s." % (mod, e)))
|
Update README.md
Point to the branch with bug fixes, not the tag ! | @@ -43,7 +43,7 @@ The ``argopy`` library should work under all OS (Linux, Mac and Windows) and wit
## Usage
-[](https://binder.pangeo.io/v2/gh/euroargodev/argopy/v0.1.6?urlpath=lab/tree/docs/tryit.ipynb)
+[](https://binder.pangeo.io/v2/gh/euroargodev/argopy/v0.1.6-branch?urlpath=lab/tree/docs/tryit.ipynb)
### Fetching Argo Data
|
Updating the framework to include the app package data
As part of our continuous integration system it has become
clear that gathering the app package data as well as the
version name can prove useful.
Adding this functionality to mainline as it could prove
useful to other developers. | @@ -727,6 +727,7 @@ class PackageHandler(object):
def setup(self, context):
context.update_metadata('app_version', self.apk_info.version_name)
+ context.update_metadata('app_name', self.apk_info.package)
self.initialize_package(context)
self.start_activity()
self.target.execute('am kill-all') # kill all *background* activities
|
Work on byte level when parsing markdown
Reasoning: instead of encoding every character one by one as we
encounter it and using half its length as the correct offset,
we can simply encode the whole string at once as utf-16le and
work with that directly. | @@ -11,8 +11,6 @@ from ..tl.types import (
MessageEntityPre, MessageEntityTextUrl
)
-def tg_string_len(s):
- return len(s.encode('utf-16le')) // 2
class Mode(Enum):
"""Different modes supported by Telegram's Markdown"""
@@ -31,7 +29,10 @@ DEFAULT_DELIMITERS = {
'```': Mode.PRE
}
-DEFAULT_URL_RE = re.compile(r'\[(.+?)\]\((.+?)\)')
+# Regex used to match utf-16le encoded r'\[(.+?)\]\((.+?)\)',
+# reason why there's '\0' after every match-literal character.
+DEFAULT_URL_RE = re.compile(b'\\[\0(.+)\\]\0\\(\0(.+?)\\)\0')
+
def parse(message, delimiters=None, url_re=None):
"""
@@ -40,40 +41,45 @@ def parse(message, delimiters=None, url_re=None):
dictionary (or default if None).
The url_re(gex) must contain two matching groups: the text to be
- clickable and the URL itself.
+ clickable and the URL itself, and be utf-16le encoded.
"""
+ # Work on byte level with the utf-16le encoding to get the offsets right.
+ # The offset will just be half the index we're at.
if url_re is None:
url_re = DEFAULT_URL_RE
elif url_re:
if isinstance(url_re, str):
- url_re = re.compile(url_re)
+ url_re = re.compile(url_re.encode('utf-16le'))
if not delimiters:
if delimiters is not None:
return message, []
delimiters = DEFAULT_DELIMITERS
+ delimiters = {k.encode('utf-16le'): v for k, v in delimiters.items()}
+
+ i = 0
result = []
current = Mode.NONE
- offset = 0
- i = 0
+ message = message.encode('utf-16le')
while i < len(message):
url_match = None
if url_re and current == Mode.NONE:
url_match = url_re.match(message, pos=i)
if url_match:
- message = ''.join((
+ message = b''.join((
message[:url_match.start()],
url_match.group(1),
message[url_match.end():]
))
result.append((
- offset,
- offset + tg_string_len(url_match.group(1)),
- (Mode.URL, url_match.group(2))
+ i // 2,
+ (i + len(url_match.group(1))) // 2,
+ (Mode.URL, url_match.group(2).decode('utf-16le'))
))
i += len(url_match.group(1))
+
if not url_match:
for d, m in delimiters.items():
if message[i:i + len(d)] == d and current in (Mode.NONE, m):
@@ -82,21 +88,20 @@ def parse(message, delimiters=None, url_re=None):
message = message[:i] + message[i + len(d):]
if current == Mode.NONE:
- result.append(offset)
+ result.append(i // 2)
current = m
else:
- result[-1] = (result[-1], offset, current)
+ result[-1] = (result[-1], i // 2, current)
current = Mode.NONE
break
if i < len(message):
- offset += tg_string_len(message[i])
- i += 1
+ i += 2
if result and not isinstance(result[-1], tuple):
result.pop()
- return message, result
+ return message.decode('utf-16le'), result
def parse_tg(message, delimiters=None):
|
update API CTA
quick change to make it a bit clearer what we offer with the eMap API if you're coming from eMap.org | "electricityorigin24h": "Origin of electricity in the last 24 hours",
"electricityproduction24h": "Electricity production in the last 24 hours",
"electricityprices24h": "Electricity prices in the last 24 hours",
- "Getdata": "Get historical data, marginal and forecast API"
+ "Getdata": "Get hourly historical, live, and forecast data with electricityMap API"
},
"footer": {
"foundbugs": "Found bugs or have ideas? Report them",
|
Removed trailing comma in function arguments
This prevented the game from running with python3.5. | @@ -62,7 +62,7 @@ def sample_sequence(
context=None,
temperature=1,
top_k=0,
- top_p=1,
+ top_p=1
):
if start_token is None:
assert context is not None, "Specify exactly one of start_token and context!"
|
llvm, function/SoftMax: Remove dead code generation
Add assert to check the invariant | @@ -4036,13 +4036,8 @@ class SoftMax(NormalizingFunction):
def __gen_llvm_exp_div(self, builder, index, ctx, vi, vo, gain, exp_sum):
- output_type = self.params[OUTPUT_TYPE]
+ assert self.get_current_function_param(OUTPUT_TYPE) == ALL
ptro = builder.gep(vo, [ctx.int32_ty(0), index])
-
- if output_type in (MAX_VAL, MAX_INDICATOR):
- builder.store(ctx.float_ty(0), ptro)
- return
-
ptri = builder.gep(vi, [ctx.int32_ty(0), index])
exp_f = ctx.module.declare_intrinsic("llvm.exp", [ctx.float_ty])
orig_val = builder.load(ptri)
@@ -4073,7 +4068,7 @@ class SoftMax(NormalizingFunction):
vector_length = ctx.int32_ty(vi.type.pointee.count)
builder = helpers.for_loop_zero_inc(builder, vector_length, inner, "exp_sum_max")
- output_type = self.params[OUTPUT_TYPE]
+ output_type = self.get_current_function_param(OUTPUT_TYPE)
exp_sum = builder.load(exp_sum_ptr)
index = builder.load(max_ind_ptr)
ptro = builder.gep(vo, [ctx.int32_ty(0), index])
|
Add missing methods to Python 2 concurrent.futures
* Add missing methods to Python 2 concurrent.futures
Future.exception_info() and Future.set_exception_info() are methods
present only in the Python 2 backport of concurrent.futures.
* Mark timeout args as optional | +# Stubs for concurrent.futures (Python 2)
+
from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Optional, Set, Tuple, Union
+from types import TracebackType
_T = TypeVar('_T')
@@ -11,17 +14,19 @@ class Future(Generic[_T]):
def cancelled(self) -> bool: ...
def running(self) -> bool: ...
def done(self) -> bool: ...
- def result(self, timeout: float = ...) -> _T: ...
- def exception(self, timeout: float = ...) -> Any: ...
+ def result(self, timeout: Optional[float] = ...) -> _T: ...
+ def exception(self, timeout: Optional[float] = ...) -> Any: ...
+ def exception_info(self, timeout: Optional[float] = ...) -> Tuple[Any, Optional[TracebackType]]: ...
def add_done_callback(self, fn: Callable[[Future], Any]) -> None: ...
def set_running_or_notify_cancel(self) -> bool: ...
def set_result(self, result: _T) -> None: ...
def set_exception(self, exception: Any) -> None: ...
+ def set_exception_info(self, exception: Any, traceback: TracebackType) -> None: ...
class Executor:
def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
- def map(self, func: Callable[..., _T], *iterables: Any, timeout: float = ...) -> Iterable[_T]: ...
+ def map(self, func: Callable[..., _T], *iterables: Any, timeout: Optional[float] = ...) -> Iterable[_T]: ...
def shutdown(self, wait: bool = ...) -> None: ...
def __enter__(self) -> Executor: ...
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ...
@@ -38,4 +43,4 @@ FIRST_COMPLETED = ... # type: str
FIRST_EXCEPTION = ... # type: str
ALL_COMPLETED = ... # type: str
-def as_completed(fs: Iterable[Future], timeout: float = ...) -> Iterator[Future]: ...
+def as_completed(fs: Iterable[Future], timeout: Optional[float] = ...) -> Iterator[Future]: ...
|
Update README.rst
Added link to Quartznet developer blog | @@ -49,6 +49,8 @@ NeMo consists of:
* Read NVIDIA `Developer Blog for example applications <https://devblogs.nvidia.com/how-to-build-domain-specific-automatic-speech-recognition-models-on-gpus/>`_
+* Read NVIDIA `Developer Blog for Quartznet ASR model <https://devblogs.nvidia.com/develop-smaller-speech-recognition-models-with-nvidias-nemo-framework/>`_
+
* Recommended version to install is **0.9.0** via pip install nemo-toolkit
* Recommended NVIDIA `NGC NeMo Toolkit container <https://ngc.nvidia.com/catalog/containers/nvidia:nemo>`_
|
Type the constructor of IntEnum and IntFlag
These should only accept integers or enum members. | @@ -45,6 +45,7 @@ class Enum(metaclass=EnumMeta):
class IntEnum(int, Enum):
value: int
+ def __new__(cls: Type[_T], value: Union[int, _T]) -> _T: ...
def unique(enumeration: _S) -> _S: ...
@@ -53,6 +54,7 @@ _auto_null: Any
# subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto()
class auto(IntFlag):
value: Any
+ def __new__(cls: Type[_T]) -> _T: ...
class Flag(Enum):
def __contains__(self: _T, other: _T) -> bool: ...
@@ -65,6 +67,7 @@ class Flag(Enum):
def __invert__(self: _T) -> _T: ...
class IntFlag(int, Flag):
+ def __new__(cls: Type[_T], value: Union[int, _T]) -> _T: ...
def __or__(self: _T, other: Union[int, _T]) -> _T: ...
def __and__(self: _T, other: Union[int, _T]) -> _T: ...
def __xor__(self: _T, other: Union[int, _T]) -> _T: ...
|
api/nxtdevices: Update color method.
This shares the implementation with the PUP color sensors, so update accordingly. | @@ -81,9 +81,8 @@ class ColorSensor:
:returns:
``Color.BLACK``, ``Color.BLUE``, ``Color.GREEN``, ``Color.YELLOW``,
- ``Color.RED``, ``Color.WHITE`` or ``None``.
- :rtype: :class:`Color <.parameters.Color>`, or ``None`` if no color is
- detected.
+ ``Color.RED``, ``Color.WHITE`` or ``Color.NONE``.
+ :rtype: :class:`Color <.parameters.Color>`
"""
pass
|
Store HilbertIndex in DirectMatrixWrapper
The HilbertIndex class performs some computation during initialization,
so it should be stored between calls to DirectMatrixWrapper::Apply. | @@ -33,22 +33,23 @@ namespace netket {
template <class Operator, class WfType = Eigen::VectorXcd>
class DirectMatrixWrapper : public AbstractMatrixWrapper<Operator, WfType> {
const Operator& operator_;
+ HilbertIndex hilbert_index_;
size_t dim_;
public:
explicit DirectMatrixWrapper(const Operator& the_operator)
: operator_(the_operator),
- dim_(HilbertIndex(the_operator.GetHilbert()).NStates()) {}
+ hilbert_index_(the_operator.GetHilbert()),
+ dim_(hilbert_index_.NStates()) {}
WfType Apply(const WfType& state) const override {
const auto& hilbert = operator_.GetHilbert();
- const HilbertIndex hilbert_index(hilbert);
WfType result(dim_);
result.setZero();
for (size_t i = 0; i < dim_; ++i) {
- auto v = hilbert_index.NumberToState(i);
+ auto v = hilbert_index_.NumberToState(i);
std::vector<std::complex<double>> matrix_elements;
std::vector<std::vector<int>> connectors;
@@ -58,7 +59,7 @@ class DirectMatrixWrapper : public AbstractMatrixWrapper<Operator, WfType> {
for (size_t k = 0; k < connectors.size(); ++k) {
auto vk = v;
hilbert.UpdateConf(vk, connectors[k], newconfs[k]);
- auto j = hilbert_index.StateToNumber(vk);
+ auto j = hilbert_index_.StateToNumber(vk);
result(j) += matrix_elements[k] * state(i);
}
|
Add a few comments on the Java settings
Add a few comments on the Java jdtls settings sample. Clarify the fields that need to be changed based on the user's environment. | @@ -398,11 +398,11 @@ npm install -g flow-language-server
"-XX:+UseG1GC",
"-XX:+UseStringDeduplication",
"-jar",
- "PATH/TO/jdt-language-server-latest/plugins/org.eclipse.equinox.launcher_*.jar"
+ "PATH/TO/jdt-language-server-latest/plugins/org.eclipse.equinox.launcher_*.jar" // 1. replace the PATH/TO with your own 2. replace * with the file version
"-configuration",
- "PATH/TO/jdt-language-server-latest/config_{win|mac|linux}", // depending on the OS
+ "PATH/TO/jdt-language-server-latest/config_{win|mac|linux}", // 1. replace the PATH/TO with your own 2. choose the config folder based on the OS
"-data",
- "<TEMP_DIR>/${project_base_name}/jdt_ws"
+ "<TEMP_DIR>/${project_base_name}/jdt_ws" // replace <TEMP_DIR> with the temp folder in your system. macOS: echo $TMPDIR
],
"enabled": true,
"languageId": "java" // will match source.java
|
Fix missing screenshots bug in docs build
When building the docs, our sphinx code looks for a json file which
indexes the available screenshots.
The only available error handling was for a decode error, but the build
would crash if no json file was found at all.
Fixes | @@ -163,7 +163,7 @@ class QtileClass(SimpleDirectiveMixin, Directive):
try:
with open(index, "r") as f:
shots = json.load(f)
- except json.JSONDecodeError:
+ except (json.JSONDecodeError, FileNotFoundError):
shots = {}
widget_shots = shots.get(class_name.lower(), dict())
|
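A standalone sketch of the pattern the fix above adopts: treat a missing index file the same as a malformed one and fall back to an empty mapping. The file path and function name are illustrative.

```python
import json

def load_screenshot_index(path: str) -> dict:
    try:
        with open(path, "r") as f:
            return json.load(f)
    except (json.JSONDecodeError, FileNotFoundError):
        return {}  # no screenshots available; the docs build proceeds

shots = load_screenshot_index("screenshots/index.json")
```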
Update apt_oceanlotus.txt
Alias from discussion. | # Copyright (c) 2014-2019 Maltrail developers (https://github.com/stamparm/maltrail/)
# See the file 'LICENSE' for copying permission
-# Aliases: apt32, apt-c-32, oceanlotus
+# Aliases: apt32, apt-c-32, oceanlotus, SectorF01
# Reference: https://www.fireeye.com/blog/threat-research/2017/05/cyber-espionage-apt32.html
|
Modify the group types date
Modify the group types date to the correct value | @@ -22,10 +22,13 @@ from tempest.tests.lib.services import base
class TestGroupTypesClient(base.BaseServiceTest):
FAKE_CREATE_GROUP_TYPE = {
"group_type": {
- "name": "group-type-001",
- "description": "Test group type 1",
- "group_specs": {},
+ "id": "6685584b-1eac-4da6-b5c3-555430cf68ff",
+ "name": "grp-type-001",
+ "description": "group type 001",
"is_public": True,
+ "group_specs": {
+ "consistent_group_snapshot_enabled": "<is> False"
+ }
}
}
@@ -35,7 +38,6 @@ class TestGroupTypesClient(base.BaseServiceTest):
"name": "group-type-001",
"description": "Test group type 1",
"is_public": True,
- "created_at": "20127-06-20T03:50:07Z",
"group_specs": {},
}
}
@@ -57,24 +59,27 @@ class TestGroupTypesClient(base.BaseServiceTest):
"name": "group-type-001",
"description": "Test group type 1",
"is_public": True,
- "created_at": "2017-06-20T03:50:07Z",
- "group_specs": {},
+ "group_specs": {
+ "consistent_group_snapshot_enabled": "<is> False"
+ }
},
{
"id": "e479997c-650b-40a4-9dfe-77655818b0d2",
"name": "group-type-002",
"description": "Test group type 2",
"is_public": True,
- "created_at": "2017-06-19T01:52:47Z",
- "group_specs": {},
+ "group_specs": {
+ "consistent_group_snapshot_enabled": "<is> False"
+ }
},
{
"id": "c5c4769e-213c-40a6-a568-8e797bb691d4",
"name": "group-type-003",
"description": "Test group type 3",
"is_public": True,
- "created_at": "2017-06-18T06:34:32Z",
- "group_specs": {},
+ "group_specs": {
+ "consistent_group_snapshot_enabled": "<is> False"
+ }
}
]
}
@@ -140,15 +145,12 @@ class TestGroupTypesClient(base.BaseServiceTest):
def _test_update_group_types(self, bytes_body=False):
resp_body = copy.deepcopy(self.FAKE_INFO_GROUP_TYPE)
- resp_body['group_type'].pop('created_at')
-
self.check_service_client_function(
self.client.update_group_type,
'tempest.lib.common.rest_client.RestClient.put',
resp_body,
bytes_body,
- group_type_id="3fbbcccf-d058-4502-8844-6feeffdf4cb5",
- name='updated-group-type-name')
+ group_type_id="3fbbcccf-d058-4502-8844-6feeffdf4cb5")
def _test_create_or_update_group_type_specs(self, bytes_body=False):
group_specs = self.FAKE_CREATE_GROUP_TYPE_SPECS['group_specs']
|
objectstore: get returns object not path
Change objectstore.get to return an object or None instead of a path. | @@ -264,12 +264,12 @@ class ObjectStore(contextlib.AbstractContextManager):
prefix=prefix,
suffix=suffix)
- @contextlib.contextmanager
def get(self, object_id):
- with Object(self) as obj:
- obj.base = object_id
- with obj.read() as path:
- yield path
+ if not self.contains(object_id):
+ return None
+
+ obj = self.new(base_id=object_id)
+ return obj
def new(self, base_id=None):
"""Creates a new temporary `Object`.
|
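A hedged sketch of the new call-site contract: `get()` now returns an object (or `None`) rather than a context-managed path, so callers branch on the return value instead of entering a `with` block. The constructor call and object id are assumptions for illustration.

```python
store = ObjectStore("/var/cache/objects")   # hypothetical instantiation

obj = store.get("some-object-id")
if obj is None:
    print("object not in store")
else:
    print("got object:", obj)               # use the returned Object directly
```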
Update release instructions to include Zenodo citation update
Fixes: | @@ -227,6 +227,19 @@ Be sure to include the whl file as an attachment.
If there are unreleased notebooks that are under testing (`NOTEBOOKS_DEPENDING_ON_UNRELEASED_FEATURES` is not empty in [dev_tools/notebooks/isolated_notebook_test.py](dev_tools/notebooks/isolated_notebook_test.py)), follow the steps in our [notebooks guide](docs/dev/notebooks.md).
+### Create zenodo release
+
+Go to the [Zenodo release page](https://zenodo.org/record/6599601#.YpZCspPMLzc).
+Log in using credentials within Google's internal password utility (or get
+someone from Google to do this). Click "New Version".
+
+* Upload the new zip file (found in releases page under "assets").
+* Remove old zip file.
+* Update version.
+* Double check all other fields.
+* Click publish.
+
+
### Email cirq-announce
Lastly, email [email protected] with the release notes
|
Add iter method to BlockStore trait
The iter method iterates through the BlockStore from the chain head back,
returning references to Blocks. | @@ -31,11 +31,15 @@ pub trait BlockStore {
fn delete(&mut self, block_ids: Vec<String>) -> Result<Vec<Block>, BlockStoreError>;
fn put(&mut self, blocks: Vec<Block>) -> Result<(), BlockStoreError>;
+
+ fn iter<'a>(&'a self) -> Box<Iterator<Item = &'a Block> + 'a>;
}
#[derive(Default)]
pub struct InMemoryBlockStore {
block_by_block_id: HashMap<String, Block>,
+ chain_head_num: u64,
+ chain_head_id: String,
}
impl InMemoryBlockStore {
@@ -78,11 +82,23 @@ impl BlockStore for InMemoryBlockStore {
fn put(&mut self, blocks: Vec<Block>) -> Result<(), BlockStoreError> {
blocks.into_iter().for_each(|block| {
+ if block.block_num > self.chain_head_num {
+ self.chain_head_id = block.header_signature.clone();
+ self.chain_head_num = block.block_num;
+ }
+
self.block_by_block_id
.insert(block.header_signature.clone(), block);
});
Ok(())
}
+
+ fn iter<'a>(&'a self) -> Box<Iterator<Item = &'a Block> + 'a> {
+ Box::new(InMemoryIter {
+ blockstore: self,
+ head: &self.chain_head_id,
+ })
+ }
}
struct InMemoryGetBlockIterator<'a> {
@@ -116,3 +132,20 @@ impl<'a> Iterator for InMemoryGetBlockIterator<'a> {
block
}
}
+
+struct InMemoryIter<'a> {
+ blockstore: &'a InMemoryBlockStore,
+ head: &'a str,
+}
+
+impl<'a> Iterator for InMemoryIter<'a> {
+ type Item = &'a Block;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let block = self.blockstore.get_block_by_block_id(self.head);
+ if let Some(b) = block {
+ self.head = &b.previous_block_id;
+ }
+ block
+ }
+}
|
use propagate_names instead of propagate_names_for_reduction for cumsum and cumprod
Summary: Pull Request resolved: | @@ -168,7 +168,7 @@ Tensor cumsum(const Tensor& self, int64_t dim, c10::optional<ScalarType> dtype)
NoNamesGuard guard;
return at::_cumsum(integer_upcast(self, dtype), dim);
}();
- namedinference::propagate_names_for_reduction(result, self, dim, /*keepdim=*/true);
+ namedinference::propagate_names(result, self);
return result;
}
@@ -185,7 +185,7 @@ Tensor& cumsum_out(Tensor& result, const Tensor& self, int64_t dim, c10::optiona
NoNamesGuard guard;
at::_cumsum_out(result, self.toType(result.scalar_type()), dim);
}
- namedinference::propagate_names_for_reduction(result, self, dim, /*keepdim=*/true);
+ namedinference::propagate_names(result, self);
return result;
}
@@ -194,7 +194,7 @@ Tensor cumprod(const Tensor& self, int64_t dim, c10::optional<ScalarType> dtype)
NoNamesGuard guard;
return at::_cumprod(integer_upcast(self, dtype), dim);
}();
- namedinference::propagate_names_for_reduction(result, self, dim, /*keepdim=*/true);
+ namedinference::propagate_names(result, self);
return result;
}
@@ -211,7 +211,7 @@ Tensor& cumprod_out(Tensor& result, const Tensor& self, int64_t dim, c10::option
NoNamesGuard guard;
at::_cumprod_out(result, self.toType(result.scalar_type()), dim);
}
- namedinference::propagate_names_for_reduction(result, self, dim, /*keepdim=*/true);
+ namedinference::propagate_names(result, self);
return result;
}
|
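A small illustration, assuming PyTorch's (experimental) named tensors, of why plain name propagation is the right call here: cumsum/cumprod keep the full shape, so no dimension is reduced away and the input names carry over unchanged.

```python
import torch

t = torch.rand(2, 3, names=("N", "C"))
out = t.cumsum(dim=1)
print(out.shape)  # torch.Size([2, 3]) -- nothing was reduced
print(out.names)  # ('N', 'C')        -- names propagated as-is
```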
settings: Enable or disable delete limit setting before saving.
We enable or disable the delete limit setting immediately on
changing the "Who can delete their own message" dropdown
before saving the changes. | @@ -1197,6 +1197,22 @@ export function build_page() {
update_message_edit_sub_settings(is_checked);
});
+ $("#id_realm_delete_own_message_policy").on("change", (e) => {
+ const setting_value = Number.parseInt($(e.target).val(), 10);
+ const disable_limit_setting =
+ setting_value === settings_config.common_message_policy_values.by_admins_only.code;
+ settings_ui.disable_sub_setting_onchange(
+ !disable_limit_setting,
+ "id_realm_msg_delete_limit_setting",
+ true,
+ );
+ settings_ui.disable_sub_setting_onchange(
+ !disable_limit_setting,
+ "id_realm_message_content_delete_limit_minutes",
+ true,
+ );
+ });
+
$("#id_realm_org_join_restrictions").on("click", (e) => {
// This prevents the disappearance of modal when there are
// no allowed domains otherwise it gets closed due to
|
Added the transform_input argument to the docs of InceptionV3
Include the `transform_input` argument in the docs of inception_v3 | @@ -23,6 +23,8 @@ def inception_v3(pretrained=False, **kwargs):
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
+ transform_input (bool): If True, preprocesses the input according to the method with which it
+ was trained on ImageNet. Default: *False*
"""
if pretrained:
if 'transform_input' not in kwargs:
|
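A hedged usage example of the newly documented flag; the `pretrained` keyword reflects the torchvision API of this era, and downloading the weights is implied.

```python
from torchvision.models import inception_v3

# Preprocess inputs the same way the ImageNet weights were trained.
model = inception_v3(pretrained=True, transform_input=True)
```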
Update tutorial.rst
Github link fixed for version 1.6.0 | @@ -4,4 +4,4 @@ Tutorial
The agate tutorial is now available in new-and-improved Jupyter Notebook format.
-Find it `on Github <https://github.com/wireservice/agate/blob/1.5.6/tutorial.ipynb>`_!
+Find it `on Github <https://github.com/wireservice/agate/blob/1.6.0/tutorial.ipynb>`_!
|
Fixed import error in environments __init__
A failure to import Atari also made the Gym import fail.
Now the Atari and Gym imports are independent | @@ -3,19 +3,25 @@ __extras__ = []
from .environment import Environment, MDPInfo
try:
Atari = None
- Gym = None
from .atari import Atari
__extras__.append('Atari')
+except ImportError:
+ pass
+
+try:
+ Gym = None
from .gym_env import Gym
__extras__.append('Gym')
except ImportError:
pass
+
try:
Mujoco = None
from .mujoco import Mujoco
__extras__.append('Mujoco')
except ImportError:
pass
+
from .car_on_hill import CarOnHill
from .generators.simple_chain import generate_simple_chain
from .grid_world import GridWorld, GridWorldVanHasselt
|
Update device.py
Black tool related formatting | @@ -1110,6 +1110,7 @@ class Device(_Connection):
or kwargs.get("cs_user") is not None
):
from jnpr.junos.console import Console
+
if kwargs.get("conn_open_timeout", None):
# Console already supports timeout while opening connections
# via `timeout` parameter. Refer `Console` documentation
|
travis v2
Attempting to get the build to work. | language: python
-# Setting sudo to false opts in to Travis-CI container-based builds.
-sudo: false
-
python:
- 2.7
- 3.6
@@ -22,6 +19,7 @@ install:
- conda create --yes -n test python=$TRAVIS_PYTHON_VERSION
- source activate test
- pip install coveralls
+ - pip install -U pip
- python setup.py install
script: $CMD
|
Downloader middleware: raise _InvalidOutput
Instead of AssertionError, to make it consistent with spider middleware | @@ -7,6 +7,7 @@ import six
from twisted.internet import defer
+from scrapy.exceptions import _InvalidOutput
from scrapy.http import Request, Response
from scrapy.middleware import MiddlewareManager
from scrapy.utils.defer import mustbe_deferred
@@ -35,9 +36,9 @@ class DownloaderMiddlewareManager(MiddlewareManager):
def process_request(request):
for method in self.methods['process_request']:
response = yield method(request=request, spider=spider)
- assert response is None or isinstance(response, (Response, Request)), \
- 'Middleware %s.process_request must return None, Response or Request, got %s' % \
- (six.get_method_self(method).__class__.__name__, response.__class__.__name__)
+ if response is not None and not isinstance(response, (Response, Request)):
+ raise _InvalidOutput('Middleware %s.process_request must return None, Response or Request, got %s' % \
+ (six.get_method_self(method).__class__.__name__, response.__class__.__name__))
if response:
defer.returnValue(response)
defer.returnValue((yield download_func(request=request,spider=spider)))
@@ -51,9 +52,9 @@ class DownloaderMiddlewareManager(MiddlewareManager):
for method in self.methods['process_response']:
response = yield method(request=request, response=response,
spider=spider)
- assert isinstance(response, (Response, Request)), \
- 'Middleware %s.process_response must return Response or Request, got %s' % \
- (six.get_method_self(method).__class__.__name__, type(response))
+ if not isinstance(response, (Response, Request)):
+ raise _InvalidOutput('Middleware %s.process_response must return Response or Request, got %s' % \
+ (six.get_method_self(method).__class__.__name__, type(response)))
if isinstance(response, Request):
defer.returnValue(response)
defer.returnValue(response)
@@ -64,9 +65,9 @@ class DownloaderMiddlewareManager(MiddlewareManager):
for method in self.methods['process_exception']:
response = yield method(request=request, exception=exception,
spider=spider)
- assert response is None or isinstance(response, (Response, Request)), \
- 'Middleware %s.process_exception must return None, Response or Request, got %s' % \
- (six.get_method_self(method).__class__.__name__, type(response))
+ if response is not None and not isinstance(response, (Response, Request)):
+ raise _InvalidOutput('Middleware %s.process_exception must return None, Response or Request, got %s' % \
+ (six.get_method_self(method).__class__.__name__, type(response)))
if response:
defer.returnValue(response)
defer.returnValue(_failure)
|
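A generic sketch (placeholder names and types, not Scrapy's real middleware plumbing) of the motivation above: `assert` statements are stripped when Python runs with `-O`, so a dedicated exception keeps the validation active in optimized mode too.

```python
class _InvalidOutput(TypeError):
    """Raised when a middleware method returns an unexpected type."""

def check_response(response):
    if not isinstance(response, (dict, list)):  # placeholder for (Response, Request)
        raise _InvalidOutput(
            "expected dict or list, got %s" % type(response).__name__
        )
    return response
```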
swarming: update setup_bigquery.sh
completed_time was renamed to end_time, and created_time never existed.
Tested by running against a dummy Swarming instance. | @@ -84,7 +84,7 @@ fi
if ! (bqschemaupdater -force \
-message swarming.v1.TaskRequest \
-table ${APPID}.swarming.task_requests \
- -partitioning-field created_time); then
+ -partitioning-field create_time); then
echo ""
echo ""
echo "Oh no! You may need to restart from scratch. You can do so with:"
@@ -97,7 +97,7 @@ fi
if ! (bqschemaupdater -force \
-message swarming.v1.TaskResult \
-table ${APPID}.swarming.task_results \
- -partitioning-field completed_time); then
+ -partitioning-field end_time); then
echo ""
echo ""
echo "Oh no! You may need to restart from scratch. You can do so with:"
|
TST: added download date range unit test
Added a unit test for download date requests that
result in empty date ranges. | @@ -560,13 +560,26 @@ class TestBasics():
def test_download_recent_data(self, caplog):
with caplog.at_level(logging.INFO, logger='pysat'):
self.testInst.download()
- # Tells user that recent data will be downloaded
+
+ # Ensure user was told that recent data will be downloaded
assert "most recent data by default" in caplog.text
- # download new files
+
+ # Ensure user was notified of new files being download
assert "Downloading data to" in caplog.text
- # Update local file list
+
+ # Ensure user was notified of updates to the local file list
assert "Updating pysat file list" in caplog.text
+ def test_download_bad_date_range(self, caplog):
+ """Test download with bad date input."""
+ with caplog.at_level(logging.WARNING, logger='pysat'):
+ self.testInst.download(start=self.ref_time,
+ stop=self.ref_time - dt.timedelta(days=10))
+
+ # Ensure user is warned about not calling download due to bad time input
+ assert "Requested download over an empty date range" in caplog.text
+ return
+
# -------------------------------------------------------------------------
#
# Test date helpers
|
[tests/brightness] Disable failing test
Honestly, I don't know why Travis is failing on this test; it works fine
on my machine with Python 2.7. Therefore, I will disable this test until
I can get to the bottom of it. | @@ -44,15 +44,15 @@ class TestBrightnessModule(unittest.TestCase):
mocks.mouseEvent(stdin=self.stdin, button=WHEEL_DOWN, inp=self.input, module=module)
self.popen.assert_call("xbacklight -10%")
- @mock.patch('bumblebee.modules.brightness.open', create=True)
- def test_update(self, mock_open):
- mock_open.side_effect = [
- mock.mock_open(read_data="20").return_value,
- mock.mock_open(read_data="100").return_value
- ]
- self.module.update_all()
- self.assertEquals(self.module.brightness(self.anyWidget), "020%")
- self.assertEquals(len(self.module.brightness(self.anyWidget)), len("100%"))
+# @mock.patch('bumblebee.modules.brightness.open', create=True)
+# def test_update(self, mock_open):
+# mock_open.side_effect = [
+# mock.mock_open(read_data="20").return_value,
+# mock.mock_open(read_data="100").return_value
+# ]
+# self.module.update_all()
+# self.assertEquals(self.module.brightness(self.anyWidget), "020%")
+# self.assertEquals(len(self.module.brightness(self.anyWidget)), len("100%"))
@mock.patch('bumblebee.modules.brightness.open', create=True)
def test_error(self,mock_open):
|
[internal] fix starts_with -> startswith
[ci skip-rust]
[ci skip-build-wheels] | @@ -108,7 +108,7 @@ class RunTracker:
return [
backend
for backend in self._all_options.for_global_scope().backend_packages
- if backend.starts_with("pants.backend.")
+ if backend.startswith("pants.backend.")
]
def start(self, run_start_time: float, specs: list[str]) -> None:
|
Update http to https
Modify the http link to an https link | @@ -59,6 +59,6 @@ Search Trove Documentation
.. _Trove Wiki: https://wiki.openstack.org/wiki/Trove
.. _Trove: https://git.openstack.org/cgit/openstack/trove
.. _Trove Client: https://git.openstack.org/cgit/openstack/python-troveclient
-.. _Trove API Documentation: http://developer.openstack.org/api-ref/database/
+.. _Trove API Documentation: https://developer.openstack.org/api-ref/database/
.. _Trove Blueprints: https://blueprints.launchpad.net/trove
.. _Trove Bugs: https://bugs.launchpad.net/trove
|
Apparently the .min() and .max() methods are faster than np.min().
That speeds up unitcheck, which is sometimes in a tight loop,
so the speedup can be ~25% | @@ -331,7 +331,7 @@ def unitcheck(u, nonbounded=None):
if nonbounded is None:
# No periodic boundary conditions provided.
- return np.min(u) > 0 and np.max(u) < 1
+ return u.min() > 0 and u.max() < 1
else:
# Alternating periodic and non-periodic boundary conditions.
unb = u[nonbounded]
|
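A rough way to check the claimed speedup (numbers are machine-dependent): the ndarray methods skip the `np.min`/`np.max` function-dispatch overhead, which is noticeable for small arrays in tight loops.

```python
import timeit
import numpy as np

u = np.random.rand(50)
print(timeit.timeit(lambda: np.min(u), number=100_000))  # function dispatch each call
print(timeit.timeit(lambda: u.min(), number=100_000))    # direct method, typically faster
```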
Update README.md
fix links in readme | @@ -8,13 +8,13 @@ Sample histology images from a trained histology image model.
This example generates 32 images (256px) combined into one output file.
-
+
## Model
The code is from a PyTorch implementation of VQ-VAE-2:
-
+[Implementation of Generating Diverse High-Fidelity Images with VQ-VAE-2 in PyTorch](https://github.com/rosinality/vq-vae-2-pytorch)
## Training data
|
fix(mediation): grant handler exceptions
Raise HandlerException on missing record | from .....messaging.base_handler import (
BaseHandler, BaseResponder, HandlerException, RequestContext
)
+from .....storage.error import StorageNotFoundError
from ..manager import MediationManager
from ..messages.mediate_grant import MediationGrant
from ..models.mediation_record import MediationRecord
@@ -19,10 +20,16 @@ class MediationGrantHandler(BaseHandler):
assert isinstance(context.message, MediationGrant)
if not context.connection_ready:
- raise HandlerException("Invalid mediation request: no active connection")
+ raise HandlerException('Received mediation grant from inactive connection')
mgr = MediationManager(context)
+ try:
record = await MediationRecord.retrieve_by_connection_id(
context, context.connection_record.connection_id
)
- await mgr.request_granted(record)
+ await mgr.request_granted(record, context.message)
+ except StorageNotFoundError as err:
+ raise HandlerException(
+ 'Received mediation grant from connection from which mediation '
+ 'has not been requested.'
+ ) from err
|
Update Max9744.py
Fixed some spelling errors | -# Script to change the voulme of the Max8744
+# Script to change the volume of the Max9744
# It's similar to the pcf8574 in that it only writes a single byte
-# The voume is controlled by writing a value between 0 and 63
+# The volume is controlled by writing a value between 0 and 63
volume = 16
arduino = Runtime.start("arduino","Arduino")
arduino.connect("COM8")
|
client: fix an incorrect title in a task
This task would be run on both containerized *and* non-containerized
deployments.
Let's have a proper title to avoid confusion. | ---
-- name: copy ceph admin keyring when non containerized deployment
+- name: copy ceph admin keyring
copy:
src: "{{ fetch_directory }}/{{ fsid }}/etc/ceph/{{ cluster }}.client.admin.keyring"
dest: "/etc/ceph/"
|
Do Python version check in init
* Do Python version check in init
This does the Python version check in a similar manner to the equivalent NumPy and SciPy checks.
* Fix pylint issues
* Pylint 10/10 | # -*- coding: utf-8 -*-
# pylint: disable=wrong-import-order
+# pylint: disable=wrong-import-position
# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# =============================================================================
"""Main QISKit public functionality."""
+
+import sys
+# Check for Python version 3.5+
+if sys.version_info < (3, 5):
+ raise Exception('QISKit requires Python version 3.5 or greater.')
+
+# Check for required ibmqe version
+from ._util import _check_ibmqe_version
+_check_ibmqe_version()
+
from IBMQuantumExperience import RegisterSizeError
from ._qiskiterror import QISKitError
@@ -39,8 +50,5 @@ from ._jobprocessor import JobProcessor
from ._quantumjob import QuantumJob
from ._quantumprogram import QuantumProgram
from ._result import Result
-from ._util import _check_ibmqe_version
__version__ = '0.5.0'
-
-_check_ibmqe_version()
|
buildCompileCommands.py : Fix for Python 3
And remove hardcoded path. | @@ -16,7 +16,7 @@ import subprocess
# Make SCons tell us everything it would do to build Gaffer
subprocess.check_call( [ "scons", "--clean" ] )
-sconsOutput = subprocess.check_output( [ "scons", "build", "--dry-run", "--no-cache" ] )
+sconsOutput = subprocess.check_output( [ "scons", "build", "--dry-run", "--no-cache" ], universal_newlines = True )
# Write that into a "compile_commands.json" file
@@ -31,7 +31,7 @@ for line in sconsOutput.split( "\n" ) :
file = line.split()[-1]
data.append(
{
- "directory" : "/Users/john/dev/gaffer",
+ "directory" : os.getcwd(),
"command" : line,
"file" : file,
}
|
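A minimal illustration of the Python 3 issue behind the `universal_newlines` change: `check_output` returns `bytes` by default, so string operations such as splitting on `"\n"` fail until text decoding is requested.

```python
import subprocess

raw = subprocess.check_output(["echo", "hello"])
print(type(raw))  # <class 'bytes'> on Python 3

text = subprocess.check_output(["echo", "hello"], universal_newlines=True)
print(text.split("\n")[0])  # str now, safe to split on newlines
```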
fix: ironic_console group is optional
Without the '| default' filter it fails during haproxy configuration | @@ -382,7 +382,7 @@ haproxy_nova_console_http_mode: "{{ not (nova_console_user_ssl_cert is defined
and nova_console_user_ssl_key is defined) }}"
haproxy_nova_console_service:
haproxy_service_name: nova_console
- haproxy_backend_nodes: "{{ groups['nova_console'] | default([]) + ((ironic_console_type == nova_console_type) | ternary(groups['ironic_console'], [])) }}"
+ haproxy_backend_nodes: "{{ groups['nova_console'] | default([]) + ((ironic_console_type == nova_console_type) | ternary(groups['ironic_console'] | default([]), [])) }}"
haproxy_ssl: "{{ haproxy_ssl }}"
haproxy_ssl_all_vips: "{{ haproxy_ssl_all_vips }}"
haproxy_port: "{{ nova_console_port }}"
@@ -398,7 +398,7 @@ haproxy_nova_console_service:
# nova-compute managed virtual machines
haproxy_nova_ironic_console_service:
haproxy_service_name: nova_ironic_console
- haproxy_backend_nodes: "{{ groups['ironic_console'] }}"
+ haproxy_backend_nodes: "{{ groups['ironic_console'] | default([]) }}"
haproxy_ssl: "{{ haproxy_ssl }}"
haproxy_ssl_all_vips: "{{ haproxy_ssl_all_vips }}"
haproxy_port: "{{ ironic_console_port }}"
|
Conditionally reconnect RobustConnection
RobustConnection was reconnecting whenever the connection was lost,
even if the connection had been closed on purpose by invoking
`connection.close()`. The reconnect code now tests if a reconnect
attempt should be made at all by consulting `self._closed`. | @@ -75,6 +75,9 @@ class RobustConnection(Connection):
super()._on_connection_close(connection, closing)
+ if self._closed:
+ return
+
log.info(
"Connection to %s closed. Reconnecting after %r seconds.",
self,
|
reexport names in 3/dateutil.tz
This was missed in and should completely fix | -from .tz import tzutc, tzoffset, tzlocal, tzfile, tzrange, tzstr, tzical, gettz, datetime_exists, datetime_ambiguous
+from .tz import (
+ tzutc as tzutc,
+ tzoffset as tzoffset,
+ tzlocal as tzlocal,
+ tzfile as tzfile,
+ tzrange as tzrange,
+ tzstr as tzstr,
+ tzical as tzical,
+ gettz as gettz,
+ datetime_exists as datetime_exists,
+ datetime_ambiguous as datetime_ambiguous
+)
|
Update dependencies.rst
typos etc. | Install Python dependencies
---------------------------
-After you cloned the RDMO repository, change to the created directory, create a ``virtualenv <https://virtualenv.readthedocs.org>``_ and install the required dependencies:
+After you have cloned the RDMO repository, change to the created directory, create a ``virtualenv <https://virtualenv.readthedocs.org>``_ and install the required dependencies:
.. code:: bash
|
Adding two incidents in Orlando
Couldn't find the date for the first incident; not entirely sure the second one belongs. | @@ -7,3 +7,22 @@ It is seen in the video that while the people were protesting, a few cops tried
**Links**
* https://twitter.com/thickliljawn/status/1267239498083110913
+
+
+## Orlando
+
+### Police fire on protesters outside city hall | Date unknown
+
+Police open fire on protesters outside of city hall with teargas, flashbands, and apparently rubber bullets.
+
+**Links**
+
+* https://twitter.com/greg_doucette/status/1269017349727928320
+
+
+### Police box in protesters before curfew for mass arrest | June 3rd
+
+Police form barricades using bicycles to trap protesters leaving before curfew.
+
+**Links**
+* https://twitter.com/filfranca/status/1268343603832774659
|
Update faq.rst
Fixed grammar | @@ -195,7 +195,7 @@ How can I create an open source derivative work of Mattermost?
If you're looking to customize the look and feel of Mattermost, see `documentation on customization <https://github.com/mattermost/docs/issues/1006>`_. For advanced customization, the user experience of the system is available in different repositories for web, mobile apps and desktop apps, and custom experiences can be developed and integrated with either Mattermost Team Edition or Mattermost Enterprise Edition via the system APIs and drivers.
-If instead of using Mattermost Team Edition or Mattermost Enterprise Edition, you choose to compile your own version of the system using the open source code from ``/mattermost-server``, there are a number of factors to consider:
+If, instead of using Mattermost Team Edition or Mattermost Enterprise Edition, you choose to compile your own version of the system using the open source code from ``/mattermost-server``, there are a number of factors to consider:
Security
|
encode unicode data
This partially reverts | @@ -224,6 +224,8 @@ def simple_post(domain, url, data, *, headers, auth, verify,
POST with a cleaner API, and return the actual HTTPResponse object, so
that error codes can be interpreted.
"""
+ if isinstance(data, str):
+ data = data.encode('utf-8') # can't pass unicode to http request posts
default_headers = CaseInsensitiveDict({
"content-type": "text/xml",
"content-length": str(len(data)),
|
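A tiny sketch of the invariant the guard restores: HTTP bodies must be bytes, and `content-length` must count encoded bytes, not characters.

```python
data = "caf\u00e9"               # 4 characters
if isinstance(data, str):
    data = data.encode("utf-8")  # can't pass unicode to HTTP posts
print(len(data))                 # 5 bytes once encoded
```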
Simplify code to set region and partition values
Validate the partition arg using the `choices` kwarg in `add_argument`.
Select the main region for the given partition using the dict defined in
the common module. | import json
import re
-import sys
from collections import OrderedDict
import argparse
import boto3
from botocore.exceptions import ClientError
+from common import PARTITION_TO_MAIN_REGION, PARTITIONS
+
DISTROS = OrderedDict(
[
("alinux", "amzn"),
@@ -341,7 +342,9 @@ def parse_args():
"--json-regions", type=str, help="path to input json file containing the regions", required=False
)
parser.add_argument("--txt-file", type=str, help="txt output file path", required=False, default="amis.txt")
- parser.add_argument("--partition", type=str, help="commercial | china | govcloud", required=True)
+ parser.add_argument(
+ "--partition", type=str, help="commercial | china | govcloud", required=True, choices=PARTITIONS
+ )
parser.add_argument("--account-id", type=str, help="AWS account id owning the AMIs", required=True)
parser.add_argument(
"--cloudformation-template",
@@ -356,16 +359,7 @@ def parse_args():
def main():
"""Run the script."""
args = parse_args()
-
- if args.partition == "commercial":
- region = "us-east-1"
- elif args.partition == "govcloud":
- region = "us-gov-west-1"
- elif args.partition == "china":
- region = "cn-north-1"
- else:
- print("Unsupported partition %s" % args.partition)
- sys.exit(1)
+ region = PARTITION_TO_MAIN_REGION.get(args.partition)
credentials = []
if args.credential:
|
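A self-contained sketch of the two simplifications above: `argparse` rejects invalid values via `choices`, and a dict lookup replaces the if/elif chain. The constants are redeclared here for illustration, with values taken from the removed branch.

```python
import argparse

PARTITIONS = ["commercial", "china", "govcloud"]
PARTITION_TO_MAIN_REGION = {
    "commercial": "us-east-1",
    "china": "cn-north-1",
    "govcloud": "us-gov-west-1",
}

parser = argparse.ArgumentParser()
parser.add_argument("--partition", required=True, choices=PARTITIONS)
args = parser.parse_args(["--partition", "govcloud"])  # bad values exit with an error
print(PARTITION_TO_MAIN_REGION[args.partition])        # us-gov-west-1
```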
Skip noobaa health check for OCP 4.10 + ODF 4.9 Live deploy
Reason: due to bug 2075422,
noobaa-default-backing-store and noobaa-default-bucket-class are in a rejected state | @@ -26,6 +26,7 @@ from ocs_ci.ocs.resources import ocs, storage_cluster
import ocs_ci.ocs.constants as constant
from ocs_ci.ocs import defaults
from ocs_ci.ocs.resources.mcg import MCG
+from ocs_ci.utility import version
from ocs_ci.utility.retry import retry
from ocs_ci.utility.utils import (
TimeoutSampler,
@@ -308,6 +309,20 @@ class CephCluster(object):
config.ENV_DATA["platform"] not in constants.MANAGED_SERVICE_PLATFORMS
and not config.COMPONENTS["disable_noobaa"]
):
+ # skip noobaa healthcheck due to bug https://bugzilla.redhat.com/show_bug.cgi?id=2075422
+ if (
+ version.get_semantic_ocp_version_from_config() == version.VERSION_4_10
+ and version.get_semantic_ocs_version_from_config()
+ == version.VERSION_4_9
+ and config.DEPLOYMENT.get("live_deployment")
+ and version.get_semantic_version(
+ config.UPGRADE.get("upgrade_ocs_version"), only_major_minor=True
+ )
+ == version.VERSION_4_10
+ ):
+ logger.info("skipping noobaa health check due to bug 2075422")
+ return
+ else:
self.wait_for_noobaa_health_ok()
def noobaa_health_check(self):
|
Fix min and max functions
remove | @@ -177,11 +177,7 @@ class BaseFunctions(ABC):
series = self.to_float(series)
if string or str(series.dtype) in self.constants.STRING_TYPES:
- series.dropna(inplace=True)
- try:
- return series.min()
- except:
- return self.to_string(series).min()
+ return self.to_string(series.dropna()).min()
else:
return series.min()
@@ -191,11 +187,7 @@ class BaseFunctions(ABC):
series = self.to_float(series)
if string or str(series.dtype) in self.constants.STRING_TYPES:
- series.dropna(inplace=True)
- try:
- return series.max()
- except:
- return self.to_string(series).max()
+ return self.to_string(series.dropna()).max()
else:
return series.max()
|
Use match rather than cast+then
In SimpleTypeRef.check_correctness_pre. Also add some comments. | @@ -2098,11 +2098,17 @@ class SimpleTypeRef(TypeRef):
@langkit_property(return_type=T.SemanticResult.array)
def check_correctness_pre():
+
d = Var(Entity.type_name.referenced_decl)
+
return d.result_ref.then(
- lambda d: d.cast(T.TypeDecl).then(
- lambda _: No(T.SemanticResult.array),
- default_val=Entity.error(S("Invalid type reference")).singleton
+ lambda d: d.match(
+ # The type ref references a type decl: return no error
+ lambda _=T.TypeDecl: No(T.SemanticResult.array),
+
+ # Not a type decl: return an error that the type reference is
+ # invalid.
+ lambda _: [Entity.error(S("Invalid type reference"))]
)
)
|