From d015a267e537f2b978d53eefb3b0f13c7d33a4e4 Mon Sep 17 00:00:00 2001 From: Josverl Date: Thu, 29 Jun 2023 01:37:06 +0000 Subject: [PATCH] Update frozen stubs for latest --- .../samd/GENERIC/asyncio/__init__.py | 31 ++ .../samd/GENERIC/asyncio/__init__.pyi | 7 + .../samd/GENERIC/asyncio/core.py | 302 ++++++++++++++++++ .../samd/GENERIC/asyncio/core.pyi | 52 +++ .../samd/GENERIC/asyncio/event.py | 66 ++++ .../samd/GENERIC/asyncio/event.pyi | 21 ++ .../samd/GENERIC/asyncio/funcs.py | 130 ++++++++ .../samd/GENERIC/asyncio/funcs.pyi | 13 + .../samd/GENERIC/asyncio/lock.py | 55 ++++ .../samd/GENERIC/asyncio/lock.pyi | 13 + .../samd/GENERIC/asyncio/stream.py | 189 +++++++++++ .../samd/GENERIC/asyncio/stream.pyi | 35 ++ .../samd/GENERIC/dht.py | 47 +++ .../samd/GENERIC/dht.pyi | 15 + .../samd/GENERIC/ds18x20.py | 52 +++ .../samd/GENERIC/ds18x20.pyi | 15 + .../samd/GENERIC/modules.json | 40 +++ .../samd/GENERIC/onewire.py | 92 ++++++ .../samd/GENERIC/onewire.pyi | 21 ++ .../samd/GENERIC/uasyncio.py | 8 + .../samd/GENERIC/uasyncio.pyi | 1 + 21 files changed, 1205 insertions(+) create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/dht.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/dht.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/onewire.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/onewire.pyi create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.py create mode 100644 stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.pyi diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.py b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.py new file mode 100644 index 000000000..1f83750c5 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.py @@ -0,0 +1,31 @@ +# MicroPython asyncio module +# MIT license; Copyright (c) 2019 Damien P. 
George + +from .core import * + +__version__ = (3, 0, 0) + +_attrs = { + "wait_for": "funcs", + "wait_for_ms": "funcs", + "gather": "funcs", + "Event": "event", + "ThreadSafeFlag": "event", + "Lock": "lock", + "open_connection": "stream", + "start_server": "stream", + "StreamReader": "stream", + "StreamWriter": "stream", +} + + +# Lazy loader, effectively does: +# global attr +# from .mod import attr +def __getattr__(attr): + mod = _attrs.get(attr, None) + if mod is None: + raise AttributeError(attr) + value = getattr(__import__(mod, None, None, True, 1), attr) + globals()[attr] = value + return value diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.pyi new file mode 100644 index 000000000..29d6c7c4e --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/__init__.pyi @@ -0,0 +1,7 @@ +from .core import * +from _typeshed import Incomplete + +__version__: Incomplete +_attrs: Incomplete + +def __getattr__(attr): ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.py b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.py new file mode 100644 index 000000000..be5119ba6 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.py @@ -0,0 +1,302 @@ +# MicroPython asyncio module +# MIT license; Copyright (c) 2019 Damien P. George + +from time import ticks_ms as ticks, ticks_diff, ticks_add +import sys, select + +# Import TaskQueue and Task, preferring built-in C code over Python code +try: + from _asyncio import TaskQueue, Task +except: + from .task import TaskQueue, Task + + +################################################################################ +# Exceptions + + +class CancelledError(BaseException): + pass + + +class TimeoutError(Exception): + pass + + +# Used when calling Loop.call_exception_handler +_exc_context = {"message": "Task exception wasn't retrieved", "exception": None, "future": None} + + +################################################################################ +# Sleep functions + + +# "Yield" once, then raise StopIteration +class SingletonGenerator: + def __init__(self): + self.state = None + self.exc = StopIteration() + + def __iter__(self): + return self + + def __next__(self): + if self.state is not None: + _task_queue.push(cur_task, self.state) + self.state = None + return None + else: + self.exc.__traceback__ = None + raise self.exc + + +# Pause task execution for the given time (integer in milliseconds, uPy extension) +# Use a SingletonGenerator to do it without allocating on the heap +def sleep_ms(t, sgen=SingletonGenerator()): + assert sgen.state is None + sgen.state = ticks_add(ticks(), max(0, t)) + return sgen + + +# Pause task execution for the given time (in seconds) +def sleep(t): + return sleep_ms(int(t * 1000)) + + +################################################################################ +# Queue and poller for stream IO + + +class IOQueue: + def __init__(self): + self.poller = select.poll() + self.map = {} # maps id(stream) to [task_waiting_read, task_waiting_write, stream] + + def _enqueue(self, s, idx): + if id(s) not in self.map: + entry = [None, None, s] + entry[idx] = cur_task + self.map[id(s)] = entry + self.poller.register(s, select.POLLIN if idx == 0 else select.POLLOUT) + else: + sm = self.map[id(s)] + assert sm[idx] is None + assert sm[1 - idx] is not None + sm[idx] = cur_task + self.poller.modify(s, select.POLLIN | select.POLLOUT) + # Link task to this IOQueue so it can 
be removed if needed + cur_task.data = self + + def _dequeue(self, s): + del self.map[id(s)] + self.poller.unregister(s) + + def queue_read(self, s): + self._enqueue(s, 0) + + def queue_write(self, s): + self._enqueue(s, 1) + + def remove(self, task): + while True: + del_s = None + for k in self.map: # Iterate without allocating on the heap + q0, q1, s = self.map[k] + if q0 is task or q1 is task: + del_s = s + break + if del_s is not None: + self._dequeue(s) + else: + break + + def wait_io_event(self, dt): + for s, ev in self.poller.ipoll(dt): + sm = self.map[id(s)] + # print('poll', s, sm, ev) + if ev & ~select.POLLOUT and sm[0] is not None: + # POLLIN or error + _task_queue.push(sm[0]) + sm[0] = None + if ev & ~select.POLLIN and sm[1] is not None: + # POLLOUT or error + _task_queue.push(sm[1]) + sm[1] = None + if sm[0] is None and sm[1] is None: + self._dequeue(s) + elif sm[0] is None: + self.poller.modify(s, select.POLLOUT) + else: + self.poller.modify(s, select.POLLIN) + + +################################################################################ +# Main run loop + + +# Ensure the awaitable is a task +def _promote_to_task(aw): + return aw if isinstance(aw, Task) else create_task(aw) + + +# Create and schedule a new task from a coroutine +def create_task(coro): + if not hasattr(coro, "send"): + raise TypeError("coroutine expected") + t = Task(coro, globals()) + _task_queue.push(t) + return t + + +# Keep scheduling tasks until there are none left to schedule +def run_until_complete(main_task=None): + global cur_task + excs_all = (CancelledError, Exception) # To prevent heap allocation in loop + excs_stop = (CancelledError, StopIteration) # To prevent heap allocation in loop + while True: + # Wait until the head of _task_queue is ready to run + dt = 1 + while dt > 0: + dt = -1 + t = _task_queue.peek() + if t: + # A task waiting on _task_queue; "ph_key" is time to schedule task at + dt = max(0, ticks_diff(t.ph_key, ticks())) + elif not _io_queue.map: + # No tasks can be woken so finished running + return + # print('(poll {})'.format(dt), len(_io_queue.map)) + _io_queue.wait_io_event(dt) + + # Get next task to run and continue it + t = _task_queue.pop() + cur_task = t + try: + # Continue running the coroutine, it's responsible for rescheduling itself + exc = t.data + if not exc: + t.coro.send(None) + else: + # If the task is finished and on the run queue and gets here, then it + # had an exception and was not await'ed on. Throwing into it now will + # raise StopIteration and the code below will catch this and run the + # call_exception_handler function. + t.data = None + t.coro.throw(exc) + except excs_all as er: + # Check the task is not on any event queue + assert t.data is None + # This task is done, check if it's the main task and then loop should stop + if t is main_task: + if isinstance(er, StopIteration): + return er.value + raise er + if t.state: + # Task was running but is now finished. + waiting = False + if t.state is True: + # "None" indicates that the task is complete and not await'ed on (yet). + t.state = None + elif callable(t.state): + # The task has a callback registered to be called on completion. + t.state(t, er) + t.state = False + waiting = True + else: + # Schedule any other tasks waiting on the completion of this task. + while t.state.peek(): + _task_queue.push(t.state.pop()) + waiting = True + # "False" indicates that the task is complete and has been await'ed on. 
+ t.state = False + if not waiting and not isinstance(er, excs_stop): + # An exception ended this detached task, so queue it for later + # execution to handle the uncaught exception if no other task retrieves + # the exception in the meantime (this is handled by Task.throw). + _task_queue.push(t) + # Save return value of coro to pass up to caller. + t.data = er + elif t.state is None: + # Task is already finished and nothing await'ed on the task, + # so call the exception handler. + _exc_context["exception"] = exc + _exc_context["future"] = t + Loop.call_exception_handler(_exc_context) + + +# Create a new task from a coroutine and run it until it finishes +def run(coro): + return run_until_complete(create_task(coro)) + + +################################################################################ +# Event loop wrapper + + +async def _stopper(): + pass + + +_stop_task = None + + +class Loop: + _exc_handler = None + + def create_task(coro): + return create_task(coro) + + def run_forever(): + global _stop_task + _stop_task = Task(_stopper(), globals()) + run_until_complete(_stop_task) + # TODO should keep running until .stop() is called, even if there're no tasks left + + def run_until_complete(aw): + return run_until_complete(_promote_to_task(aw)) + + def stop(): + global _stop_task + if _stop_task is not None: + _task_queue.push(_stop_task) + # If stop() is called again, do nothing + _stop_task = None + + def close(): + pass + + def set_exception_handler(handler): + Loop._exc_handler = handler + + def get_exception_handler(): + return Loop._exc_handler + + def default_exception_handler(loop, context): + print(context["message"]) + print("future:", context["future"], "coro=", context["future"].coro) + sys.print_exception(context["exception"]) + + def call_exception_handler(context): + (Loop._exc_handler or Loop.default_exception_handler)(Loop, context) + + +# The runq_len and waitq_len arguments are for legacy uasyncio compatibility +def get_event_loop(runq_len=0, waitq_len=0): + return Loop + + +def current_task(): + return cur_task + + +def new_event_loop(): + global _task_queue, _io_queue + # TaskQueue of Task instances + _task_queue = TaskQueue() + # Task queue and poller for stream IO + _io_queue = IOQueue() + return Loop + + +# Initialise default event loop +new_event_loop() diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.pyi new file mode 100644 index 000000000..775c3af18 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/core.pyi @@ -0,0 +1,52 @@ +from .task import Task as Task, TaskQueue as TaskQueue +from _typeshed import Incomplete + +class CancelledError(BaseException): ... +class TimeoutError(Exception): ... + +_exc_context: Incomplete + +class SingletonGenerator: + state: Incomplete + exc: Incomplete + def __init__(self) -> None: ... + def __iter__(self): ... + def __next__(self) -> None: ... + +def sleep_ms(t, sgen=...): ... +def sleep(t): ... + +class IOQueue: + poller: Incomplete + map: Incomplete + def __init__(self) -> None: ... + def _enqueue(self, s, idx) -> None: ... + def _dequeue(self, s) -> None: ... + def queue_read(self, s) -> None: ... + def queue_write(self, s) -> None: ... + def remove(self, task) -> None: ... + def wait_io_event(self, dt) -> None: ... + +def _promote_to_task(aw): ... +def create_task(coro): ... +def run_until_complete(main_task: Incomplete | None = ...): ... +def run(coro): ... +async def _stopper() -> None: ... 
+ +_stop_task: Incomplete + +class Loop: + _exc_handler: Incomplete + def create_task(coro): ... + def run_forever() -> None: ... + def run_until_complete(aw): ... + def stop() -> None: ... + def close() -> None: ... + def set_exception_handler(handler) -> None: ... + def get_exception_handler(): ... + def default_exception_handler(loop, context) -> None: ... + def call_exception_handler(context) -> None: ... + +def get_event_loop(runq_len: int = ..., waitq_len: int = ...): ... +def current_task(): ... +def new_event_loop(): ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.py b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.py new file mode 100644 index 000000000..e0b41f732 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.py @@ -0,0 +1,66 @@ +# MicroPython asyncio module +# MIT license; Copyright (c) 2019-2020 Damien P. George + +from . import core + + +# Event class for primitive events that can be waited on, set, and cleared +class Event: + def __init__(self): + self.state = False # False=unset; True=set + self.waiting = core.TaskQueue() # Queue of Tasks waiting on completion of this event + + def is_set(self): + return self.state + + def set(self): + # Event becomes set, schedule any tasks waiting on it + # Note: This must not be called from anything except the thread running + # the asyncio loop (i.e. neither hard or soft IRQ, or a different thread). + while self.waiting.peek(): + core._task_queue.push(self.waiting.pop()) + self.state = True + + def clear(self): + self.state = False + + # async + def wait(self): + if not self.state: + # Event not set, put the calling task on the event's waiting queue + self.waiting.push(core.cur_task) + # Set calling task's data to the event's queue so it can be removed if needed + core.cur_task.data = self.waiting + yield + return True + + +# MicroPython-extension: This can be set from outside the asyncio event loop, +# such as other threads, IRQs or scheduler context. Implementation is a stream +# that asyncio will poll until a flag is set. +# Note: Unlike Event, this is self-clearing after a wait(). +try: + import io + + class ThreadSafeFlag(io.IOBase): + def __init__(self): + self.state = 0 + + def ioctl(self, req, flags): + if req == 3: # MP_STREAM_POLL + return self.state * flags + return None + + def set(self): + self.state = 1 + + def clear(self): + self.state = 0 + + async def wait(self): + if not self.state: + yield core._io_queue.queue_read(self) + self.state = 0 + +except ImportError: + pass diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.pyi new file mode 100644 index 000000000..731c304c9 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/event.pyi @@ -0,0 +1,21 @@ +import io +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +class Event: + state: bool + waiting: Incomplete + def __init__(self) -> None: ... + def is_set(self): ... + def set(self) -> None: ... + def clear(self) -> None: ... + def wait(self) -> Generator[None, None, Incomplete]: ... + +class ThreadSafeFlag(io.IOBase): + state: int + def __init__(self) -> None: ... + def ioctl(self, req, flags): ... + def set(self) -> None: ... + def clear(self) -> None: ... + async def wait(self) -> Generator[Incomplete, None, None]: ... 
diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.py b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.py new file mode 100644 index 000000000..599091dfb --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.py @@ -0,0 +1,130 @@ +# MicroPython asyncio module +# MIT license; Copyright (c) 2019-2022 Damien P. George + +from . import core + + +async def _run(waiter, aw): + try: + result = await aw + status = True + except BaseException as er: + result = None + status = er + if waiter.data is None: + # The waiter is still waiting, cancel it. + if waiter.cancel(): + # Waiter was cancelled by us, change its CancelledError to an instance of + # CancelledError that contains the status and result of waiting on aw. + # If the wait_for task subsequently gets cancelled externally then this + # instance will be reset to a CancelledError instance without arguments. + waiter.data = core.CancelledError(status, result) + + +async def wait_for(aw, timeout, sleep=core.sleep): + aw = core._promote_to_task(aw) + if timeout is None: + return await aw + + # Run aw in a separate runner task that manages its exceptions. + runner_task = core.create_task(_run(core.cur_task, aw)) + + try: + # Wait for the timeout to elapse. + await sleep(timeout) + except core.CancelledError as er: + status = er.value + if status is None: + # This wait_for was cancelled externally, so cancel aw and re-raise. + runner_task.cancel() + raise er + elif status is True: + # aw completed successfully and cancelled the sleep, so return aw's result. + return er.args[1] + else: + # aw raised an exception, propagate it out to the caller. + raise status + + # The sleep finished before aw, so cancel aw and raise TimeoutError. + runner_task.cancel() + await runner_task + raise core.TimeoutError + + +def wait_for_ms(aw, timeout): + return wait_for(aw, timeout, core.sleep_ms) + + +class _Remove: + @staticmethod + def remove(t): + pass + + +# async +def gather(*aws, return_exceptions=False): + if not aws: + return [] + + def done(t, er): + # Sub-task "t" has finished, with exception "er". + nonlocal state + if gather_task.data is not _Remove: + # The main gather task has already been scheduled, so do nothing. + # This happens if another sub-task already raised an exception and + # woke the main gather task (via this done function), or if the main + # gather task was cancelled externally. + return + elif not return_exceptions and not isinstance(er, StopIteration): + # A sub-task raised an exception, indicate that to the gather task. + state = er + else: + state -= 1 + if state: + # Still some sub-tasks running. + return + # Gather waiting is done, schedule the main gather task. + core._task_queue.push(gather_task) + + ts = [core._promote_to_task(aw) for aw in aws] + for i in range(len(ts)): + if ts[i].state is not True: + # Task is not running, gather not currently supported for this case. + raise RuntimeError("can't gather") + # Register the callback to call when the task is done. + ts[i].state = done + + # Set the state for execution of the gather. + gather_task = core.cur_task + state = len(ts) + cancel_all = False + + # Wait for the a sub-task to need attention. + gather_task.data = _Remove + try: + yield + except core.CancelledError as er: + cancel_all = True + state = er + + # Clean up tasks. + for i in range(len(ts)): + if ts[i].state is done: + # Sub-task is still running, deregister the callback and cancel if needed. 
+ ts[i].state = True + if cancel_all: + ts[i].cancel() + elif isinstance(ts[i].data, StopIteration): + # Sub-task ran to completion, get its return value. + ts[i] = ts[i].data.value + else: + # Sub-task had an exception with return_exceptions==True, so get its exception. + ts[i] = ts[i].data + + # Either this gather was cancelled, or one of the sub-tasks raised an exception with + # return_exceptions==False, so reraise the exception here. + if state: + raise state + + # Return the list of return values of each sub-task. + return ts diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.pyi new file mode 100644 index 000000000..1e111c05a --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/funcs.pyi @@ -0,0 +1,13 @@ +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +async def _run(waiter, aw) -> None: ... +async def wait_for(aw, timeout, sleep=...): ... +def wait_for_ms(aw, timeout): ... + +class _Remove: + @staticmethod + def remove(t) -> None: ... + +def gather(*aws, return_exceptions: bool = ...) -> Generator[None, None, Incomplete]: ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.py b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.py new file mode 100644 index 000000000..0a46ac326 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.py @@ -0,0 +1,55 @@ +# MicroPython asyncio module +# MIT license; Copyright (c) 2019-2020 Damien P. George + +from . import core + + +# Lock class for primitive mutex capability +class Lock: + def __init__(self): + # The state can take the following values: + # - 0: unlocked + # - 1: locked + # - : unlocked but this task has been scheduled to acquire the lock next + self.state = 0 + # Queue of Tasks waiting to acquire this Lock + self.waiting = core.TaskQueue() + + def locked(self): + return self.state == 1 + + def release(self): + if self.state != 1: + raise RuntimeError("Lock not acquired") + if self.waiting.peek(): + # Task(s) waiting on lock, schedule next Task + self.state = self.waiting.pop() + core._task_queue.push(self.state) + else: + # No Task waiting so unlock + self.state = 0 + + # async + def acquire(self): + if self.state != 0: + # Lock unavailable, put the calling Task on the waiting queue + self.waiting.push(core.cur_task) + # Set calling task's data to the lock's queue so it can be removed if needed + core.cur_task.data = self.waiting + try: + yield + except core.CancelledError as er: + if self.state == core.cur_task: + # Cancelled while pending on resume, schedule next waiting Task + self.state = 1 + self.release() + raise er + # Lock available, set it as locked + self.state = 1 + return True + + async def __aenter__(self): + return await self.acquire() + + async def __aexit__(self, exc_type, exc, tb): + return self.release() diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.pyi new file mode 100644 index 000000000..b89edeb63 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/lock.pyi @@ -0,0 +1,13 @@ +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +class Lock: + state: int + waiting: Incomplete + def __init__(self) -> None: ... + def locked(self): ... + def release(self) -> None: ... + def acquire(self) -> Generator[None, None, Incomplete]: ... 
+ async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc, tb): ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.py b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.py new file mode 100644 index 000000000..c47c48cf0 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.py @@ -0,0 +1,189 @@ +# MicroPython asyncio module +# MIT license; Copyright (c) 2019-2020 Damien P. George + +from . import core + + +class Stream: + def __init__(self, s, e={}): + self.s = s + self.e = e + self.out_buf = b"" + + def get_extra_info(self, v): + return self.e[v] + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.close() + + def close(self): + pass + + async def wait_closed(self): + # TODO yield? + self.s.close() + + # async + def read(self, n=-1): + r = b"" + while True: + yield core._io_queue.queue_read(self.s) + r2 = self.s.read(n) + if r2 is not None: + if n >= 0: + return r2 + if not len(r2): + return r + r += r2 + + # async + def readinto(self, buf): + yield core._io_queue.queue_read(self.s) + return self.s.readinto(buf) + + # async + def readexactly(self, n): + r = b"" + while n: + yield core._io_queue.queue_read(self.s) + r2 = self.s.read(n) + if r2 is not None: + if not len(r2): + raise EOFError + r += r2 + n -= len(r2) + return r + + # async + def readline(self): + l = b"" + while True: + yield core._io_queue.queue_read(self.s) + l2 = self.s.readline() # may do multiple reads but won't block + l += l2 + if not l2 or l[-1] == 10: # \n (check l in case l2 is str) + return l + + def write(self, buf): + if not self.out_buf: + # Try to write immediately to the underlying stream. + ret = self.s.write(buf) + if ret == len(buf): + return + if ret is not None: + buf = buf[ret:] + self.out_buf += buf + + # async + def drain(self): + if not self.out_buf: + # Drain must always yield, so a tight loop of write+drain can't block the scheduler. + return (yield from core.sleep_ms(0)) + mv = memoryview(self.out_buf) + off = 0 + while off < len(mv): + yield core._io_queue.queue_write(self.s) + ret = self.s.write(mv[off:]) + if ret is not None: + off += ret + self.out_buf = b"" + + +# Stream can be used for both reading and writing to save code size +StreamReader = Stream +StreamWriter = Stream + + +# Create a TCP stream connection to a remote host +# +# async +def open_connection(host, port): + from errno import EINPROGRESS + import socket + + ai = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)[0] # TODO this is blocking! 
+ s = socket.socket(ai[0], ai[1], ai[2]) + s.setblocking(False) + ss = Stream(s) + try: + s.connect(ai[-1]) + except OSError as er: + if er.errno != EINPROGRESS: + raise er + yield core._io_queue.queue_write(s) + return ss, ss + + +# Class representing a TCP stream server, can be closed and used in "async with" +class Server: + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + self.close() + await self.wait_closed() + + def close(self): + self.task.cancel() + + async def wait_closed(self): + await self.task + + async def _serve(self, s, cb): + # Accept incoming connections + while True: + try: + yield core._io_queue.queue_read(s) + except core.CancelledError: + # Shutdown server + s.close() + return + try: + s2, addr = s.accept() + except: + # Ignore a failed accept + continue + s2.setblocking(False) + s2s = Stream(s2, {"peername": addr}) + core.create_task(cb(s2s, s2s)) + + +# Helper function to start a TCP stream server, running as a new task +# TODO could use an accept-callback on socket read activity instead of creating a task +async def start_server(cb, host, port, backlog=5): + import socket + + # Create and bind server socket. + host = socket.getaddrinfo(host, port)[0] # TODO this is blocking! + s = socket.socket() + s.setblocking(False) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind(host[-1]) + s.listen(backlog) + + # Create and return server object and task. + srv = Server() + srv.task = core.create_task(srv._serve(s, cb)) + return srv + + +################################################################################ +# Legacy uasyncio compatibility + + +async def stream_awrite(self, buf, off=0, sz=-1): + if off != 0 or sz != -1: + buf = memoryview(buf) + if sz == -1: + sz = len(buf) + buf = buf[off : off + sz] + self.write(buf) + await self.drain() + + +Stream.aclose = Stream.wait_closed +Stream.awrite = stream_awrite +Stream.awritestr = stream_awrite # TODO explicitly convert to bytes? diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.pyi new file mode 100644 index 000000000..fb94e2692 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/asyncio/stream.pyi @@ -0,0 +1,35 @@ +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +class Stream: + s: Incomplete + e: Incomplete + out_buf: bytes + def __init__(self, s, e=...) -> None: ... + def get_extra_info(self, v): ... + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc, tb) -> None: ... + def close(self) -> None: ... + async def wait_closed(self) -> None: ... + def read(self, n: int = ...) -> Generator[Incomplete, None, Incomplete]: ... + def readinto(self, buf) -> Generator[Incomplete, None, Incomplete]: ... + def readexactly(self, n) -> Generator[Incomplete, None, Incomplete]: ... + def readline(self) -> Generator[Incomplete, None, Incomplete]: ... + def write(self, buf) -> None: ... + def drain(self) -> Generator[Incomplete, None, Incomplete]: ... + +StreamReader = Stream +StreamWriter = Stream + +def open_connection(host, port) -> Generator[Incomplete, None, Incomplete]: ... + +class Server: + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc, tb) -> None: ... + def close(self) -> None: ... + async def wait_closed(self) -> None: ... + async def _serve(self, s, cb) -> Generator[Incomplete, None, None]: ... 
+ +async def start_server(cb, host, port, backlog: int = ...): ... +async def stream_awrite(self, buf, off: int = ..., sz: int = ...) -> None: ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/dht.py b/stubs/micropython-latest-frozen/samd/GENERIC/dht.py new file mode 100644 index 000000000..4624ae2ad --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/dht.py @@ -0,0 +1,47 @@ +# DHT11/DHT22 driver for MicroPython on ESP8266 +# MIT license; Copyright (c) 2016 Damien P. George + +import sys +import machine + +if hasattr(machine, "dht_readinto"): + from machine import dht_readinto +elif sys.platform.startswith("esp"): + from esp import dht_readinto +elif sys.platform == "pyboard": + from pyb import dht_readinto +else: + dht_readinto = __import__(sys.platform).dht_readinto + +del machine + + +class DHTBase: + def __init__(self, pin): + self.pin = pin + self.buf = bytearray(5) + + def measure(self): + buf = self.buf + dht_readinto(self.pin, buf) + if (buf[0] + buf[1] + buf[2] + buf[3]) & 0xFF != buf[4]: + raise Exception("checksum error") + + +class DHT11(DHTBase): + def humidity(self): + return self.buf[0] + + def temperature(self): + return self.buf[2] + + +class DHT22(DHTBase): + def humidity(self): + return (self.buf[0] << 8 | self.buf[1]) * 0.1 + + def temperature(self): + t = ((self.buf[2] & 0x7F) << 8 | self.buf[3]) * 0.1 + if self.buf[2] & 0x80: + t = -t + return t diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/dht.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/dht.pyi new file mode 100644 index 000000000..7ac764e6b --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/dht.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class DHTBase: + pin: Incomplete + buf: Incomplete + def __init__(self, pin) -> None: ... + def measure(self) -> None: ... + +class DHT11(DHTBase): + def humidity(self): ... + def temperature(self): ... + +class DHT22(DHTBase): + def humidity(self): ... + def temperature(self): ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.py b/stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.py new file mode 100644 index 000000000..ad2d9f52c --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.py @@ -0,0 +1,52 @@ +# DS18x20 temperature sensor driver for MicroPython. +# MIT license; Copyright (c) 2016 Damien P. 
George + +from micropython import const + +_CONVERT = const(0x44) +_RD_SCRATCH = const(0xBE) +_WR_SCRATCH = const(0x4E) + + +class DS18X20: + def __init__(self, onewire): + self.ow = onewire + self.buf = bytearray(9) + + def scan(self): + return [rom for rom in self.ow.scan() if rom[0] in (0x10, 0x22, 0x28)] + + def convert_temp(self): + self.ow.reset(True) + self.ow.writebyte(self.ow.SKIP_ROM) + self.ow.writebyte(_CONVERT) + + def read_scratch(self, rom): + self.ow.reset(True) + self.ow.select_rom(rom) + self.ow.writebyte(_RD_SCRATCH) + self.ow.readinto(self.buf) + if self.ow.crc8(self.buf): + raise Exception("CRC error") + return self.buf + + def write_scratch(self, rom, buf): + self.ow.reset(True) + self.ow.select_rom(rom) + self.ow.writebyte(_WR_SCRATCH) + self.ow.write(buf) + + def read_temp(self, rom): + buf = self.read_scratch(rom) + if rom[0] == 0x10: + if buf[1]: + t = buf[0] >> 1 | 0x80 + t = -((~t + 1) & 0xFF) + else: + t = buf[0] >> 1 + return t - 0.25 + (buf[7] - buf[6]) / buf[7] + else: + t = buf[1] << 8 | buf[0] + if t & 0x8000: # sign bit set + t = -((t ^ 0xFFFF) + 1) + return t / 16 diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.pyi new file mode 100644 index 000000000..43b201500 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/ds18x20.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +_CONVERT: Incomplete +_RD_SCRATCH: Incomplete +_WR_SCRATCH: Incomplete + +class DS18X20: + ow: Incomplete + buf: Incomplete + def __init__(self, onewire) -> None: ... + def scan(self): ... + def convert_temp(self) -> None: ... + def read_scratch(self, rom): ... + def write_scratch(self, rom, buf) -> None: ... + def read_temp(self, rom): ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/modules.json b/stubs/micropython-latest-frozen/samd/GENERIC/modules.json index b4f1d29c1..a3494cb0a 100644 --- a/stubs/micropython-latest-frozen/samd/GENERIC/modules.json +++ b/stubs/micropython-latest-frozen/samd/GENERIC/modules.json @@ -19,6 +19,46 @@ { "file": "_boot.py", "module": "_boot" + }, + { + "file": "asyncio/__init__.py", + "module": "__init__" + }, + { + "file": "asyncio/core.py", + "module": "core" + }, + { + "file": "asyncio/event.py", + "module": "event" + }, + { + "file": "asyncio/funcs.py", + "module": "funcs" + }, + { + "file": "asyncio/lock.py", + "module": "lock" + }, + { + "file": "asyncio/stream.py", + "module": "stream" + }, + { + "file": "dht.py", + "module": "dht" + }, + { + "file": "ds18x20.py", + "module": "ds18x20" + }, + { + "file": "onewire.py", + "module": "onewire" + }, + { + "file": "uasyncio.py", + "module": "uasyncio" } ] } \ No newline at end of file diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/onewire.py b/stubs/micropython-latest-frozen/samd/GENERIC/onewire.py new file mode 100644 index 000000000..4c6da741c --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/onewire.py @@ -0,0 +1,92 @@ +# 1-Wire driver for MicroPython +# MIT license; Copyright (c) 2016 Damien P. 
George + +import _onewire as _ow + + +class OneWireError(Exception): + pass + + +class OneWire: + SEARCH_ROM = 0xF0 + MATCH_ROM = 0x55 + SKIP_ROM = 0xCC + + def __init__(self, pin): + self.pin = pin + self.pin.init(pin.OPEN_DRAIN, pin.PULL_UP) + + def reset(self, required=False): + reset = _ow.reset(self.pin) + if required and not reset: + raise OneWireError + return reset + + def readbit(self): + return _ow.readbit(self.pin) + + def readbyte(self): + return _ow.readbyte(self.pin) + + def readinto(self, buf): + for i in range(len(buf)): + buf[i] = _ow.readbyte(self.pin) + + def writebit(self, value): + return _ow.writebit(self.pin, value) + + def writebyte(self, value): + return _ow.writebyte(self.pin, value) + + def write(self, buf): + for b in buf: + _ow.writebyte(self.pin, b) + + def select_rom(self, rom): + self.reset() + self.writebyte(self.MATCH_ROM) + self.write(rom) + + def scan(self): + devices = [] + diff = 65 + rom = False + for i in range(0xFF): + rom, diff = self._search_rom(rom, diff) + if rom: + devices += [rom] + if diff == 0: + break + return devices + + def _search_rom(self, l_rom, diff): + if not self.reset(): + return None, 0 + self.writebyte(self.SEARCH_ROM) + if not l_rom: + l_rom = bytearray(8) + rom = bytearray(8) + next_diff = 0 + i = 64 + for byte in range(8): + r_b = 0 + for bit in range(8): + b = self.readbit() + if self.readbit(): + if b: # there are no devices or there is an error on the bus + return None, 0 + else: + if not b: # collision, two devices with different bit meaning + if diff > i or ((l_rom[byte] & (1 << bit)) and diff != i): + b = 1 + next_diff = i + self.writebit(b) + if b: + r_b |= 1 << bit + i -= 1 + rom[byte] = r_b + return rom, next_diff + + def crc8(self, data): + return _ow.crc8(data) diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/onewire.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/onewire.pyi new file mode 100644 index 000000000..5ca094b28 --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/onewire.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +class OneWireError(Exception): ... + +class OneWire: + SEARCH_ROM: int + MATCH_ROM: int + SKIP_ROM: int + pin: Incomplete + def __init__(self, pin) -> None: ... + def reset(self, required: bool = ...): ... + def readbit(self): ... + def readbyte(self): ... + def readinto(self, buf) -> None: ... + def writebit(self, value): ... + def writebyte(self, value): ... + def write(self, buf) -> None: ... + def select_rom(self, rom) -> None: ... + def scan(self): ... + def _search_rom(self, l_rom, diff): ... + def crc8(self, data): ... diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.py b/stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.py new file mode 100644 index 000000000..67e6ddcfa --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.py @@ -0,0 +1,8 @@ +# This module just allows `import uasyncio` to work. It lazy-loads from +# `asyncio` without duplicating its globals dict. + + +def __getattr__(attr): + import asyncio + + return getattr(asyncio, attr) diff --git a/stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.pyi b/stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.pyi new file mode 100644 index 000000000..d53bcfbeb --- /dev/null +++ b/stubs/micropython-latest-frozen/samd/GENERIC/uasyncio.pyi @@ -0,0 +1 @@ +def __getattr__(attr): ...
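
Usage note (illustrative sketch, not part of the patch itself): with these modules frozen into a MicroPython build, `uasyncio` is only a forwarding shim and `asyncio` pulls `funcs`/`event`/`lock`/`stream` in lazily via module-level `__getattr__`, so both import spellings reach the same code. The `blink` coroutine below is a made-up placeholder; `run` and `sleep_ms` come from `asyncio.core` as added above.

import uasyncio   # shim: __getattr__ forwards every attribute lookup to asyncio
import asyncio    # real package; submodules are imported only on first access

async def blink(n):
    for _ in range(n):
        # sleep_ms is the MicroPython extension defined in asyncio/core.py
        await asyncio.sleep_ms(100)

# First access to uasyncio.run triggers uasyncio.__getattr__, which returns
# asyncio.run; touching asyncio.gather would import asyncio.funcs on demand.
uasyncio.run(blink(3))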